repo_name
stringlengths 5
114
| repo_url
stringlengths 24
133
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| branch_name
stringclasses 209
values | visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 9.83k
683M
⌀ | star_events_count
int64 0
22.6k
| fork_events_count
int64 0
4.15k
| gha_license_id
stringclasses 17
values | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_language
stringclasses 115
values | files
listlengths 1
13.2k
| num_files
int64 1
13.2k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
NickMcGettigan/CS446Group | https://github.com/NickMcGettigan/CS446Group | 7043f79c0bca16a2ca5c72fb73d722dbdfb33d4a | 591a7c1a255b6f71a4db1804c4ce5dbb4b65b2f9 | 029db6501e6976c6c41416b4185cea106c69e9fe | refs/heads/master | 2021-04-04T11:23:40.504030 | 2020-03-19T19:36:57 | 2020-03-19T19:36:57 | 248,452,649 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4991680383682251,
"alphanum_fraction": 0.5357736945152283,
"avg_line_length": 23.059999465942383,
"blob_id": "84b2724283511a62b88e6cfdcbfc27edc923a811",
"content_id": "efe98b02a403c110ea26e42e882bfd02e0127f99",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1202,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 50,
"path": "/kmeans.py",
"repo_name": "NickMcGettigan/CS446Group",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\n# CS446 Group Project \n# Nick McGettigan\n\nimport numpy\nimport matplotlib.pyplot as plt\nfrom sklearn.cluster import KMeans\n\nCLUSTERS=3\nFILENAME = \"stop_instances_1336.csv\"\n\ndef k_means():\n data = numpy.loadtxt(open(FILENAME, \"rb\"), delimiter=',', skiprows=1)\n #plt.scatter(data[:,0], data[:,1])\n #plt.savefig(\"test1.png\")\n\n # n_init = number of runs\n km = KMeans(n_clusters=CLUSTERS, \n init='random', \n n_init=10, \n max_iter=300, \n tol=1e-04, \n random_state=0)\n y_km = km.fit_predict(data)\n\n # for i in range(CLUSTERS):\n # plt.scatter(\n # data[y_km == i, 0], data[y_km == i, 1],\n # s=50, c='lightgreen',\n # marker='s', edgecolor='black',\n # label='cluster ' + str(i)\n # )\n\n\n # plot the centroids\n plt.scatter(\n km.cluster_centers_[:, 0], km.cluster_centers_[:, 1],\n s=250, marker='*',\n c='red', edgecolor='black',\n label='centroids'\n )\n plt.ylim((-122.525,-122.550))\n plt.legend(scatterpoints=1)\n plt.grid()\n plt.savefig(\"test1.png\")\n print(km.cluster_centers_)\n\nif __name__ == \"__main__\":\n k_means()"
},
{
"alpha_fraction": 0.6499045491218567,
"alphanum_fraction": 0.7262889742851257,
"avg_line_length": 24.354839324951172,
"blob_id": "9c78f62996a6198c30dcf460a45a54beb13ae5a3",
"content_id": "d83133e27d57c55d5aa95ca66ab2f0d77355d67d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1571,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 62,
"path": "/hc_2.py",
"repo_name": "NickMcGettigan/CS446Group",
"src_encoding": "UTF-8",
"text": "# Marcus Kwong, Portland State University\n# Adv ML\n# hierarchical clustering experiment 1\n\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.preprocessing import normalize\nimport scipy.cluster.hierarchy as shc\nfrom sklearn.cluster import AgglomerativeClustering\n\nGOOD_DATA = \"stop_instances_102.csv\"\nBAD_DATA = \"stop_instances_3179.csv\"\nUGLY_DATA = \"stop_instances_1336.csv\"\nTEST_DATA = \"wcd.csv\"\n\nSTOP102 = (45.6343,\t-122.531487)\nSTOP1336 = (45.684727, -122.660873)\nSTOP3179 = (45.531506, -122.656174)\n\n\n#data = pd.read_csv(GOOD_DATA)\n#data = pd.read_csv(BAD_DATA)\ndata = pd.read_csv(UGLY_DATA)\n#data = pd.read_csv(TEST_DATA)\ndata.head()\n\n#plt.figure(figsize=(10, 7)) \n#plt.scatter(data['latitude'],data['longitude']) \n#plt.show()\n\n\"\"\"\ndata_scaled = normalize(data)\ndata_scaled = pd.DataFrame(data_scaled)\ndata_scaled.head()\n\"\"\"\n\nplt.figure(figsize=(10, 7)) \nplt.title(\"Dendrograms\") \ndend = shc.dendrogram(shc.linkage(data, method='ward'))\nplt.axhline(y=0.00015, color='r', linestyle='--')\n#plt.show()\n\n# apply HC for 2 clusters\ncluster = AgglomerativeClustering(n_clusters=5, affinity='euclidean', linkage='ward') \ncluster.fit_predict(data)\n\n# apply HC for 2 clusters\ncluster = AgglomerativeClustering(n_clusters=5, affinity='euclidean', linkage='ward') \ncluster.fit_predict(data)\n\n# visualize the 2 clusters\nplt.figure(figsize=(10, 7)) \nplt.scatter(data['latitude'], data['longitude'], c=cluster.labels_)\n\n#plt.scatter(STOP102[0], STOP102[1])\n\nplt.scatter(STOP1336[0], STOP1336[1])\n\n#plt.scatter(STOP3179[0], STOP3179[1])\n\nplt.show()"
}
] | 2 |
mauriciomarinho2019/Teste_detecao2 | https://github.com/mauriciomarinho2019/Teste_detecao2 | 5ae14b4a72f7cfc23840039e5ab2215678527141 | f46f2317167a9d8094c4cd6ad551fdf83e708643 | 3cd3092140f443176dfdd3b2149e9c28f1e04bf1 | refs/heads/main | 2023-08-14T05:55:55.020564 | 2021-09-08T01:06:15 | 2021-09-08T01:06:15 | 404,167,834 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.8181818127632141,
"alphanum_fraction": 0.8333333134651184,
"avg_line_length": 32,
"blob_id": "3bac53f4fc804a644e5fdcda7ea70cb30769bf46",
"content_id": "77aee72a4bf0f729d0e6d4b6ee33646e42be5fef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 68,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 2,
"path": "/README.md",
"repo_name": "mauriciomarinho2019/Teste_detecao2",
"src_encoding": "UTF-8",
"text": "# Teste_detecao2\nDetecção objetos usando biblioteca de Streaming.\n"
},
{
"alpha_fraction": 0.6105819940567017,
"alphanum_fraction": 0.641269862651825,
"avg_line_length": 23.86842155456543,
"blob_id": "d793751fa7a027745146a0cb2057cac268d123eb",
"content_id": "73d29691b54752e6625006a43d1e51dc8cb551c6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 945,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 38,
"path": "/detecta2.py",
"repo_name": "mauriciomarinho2019/Teste_detecao2",
"src_encoding": "UTF-8",
"text": "#importa biblioteca de streeming#\n\nfrom flask_opencv_streamer.streamer import Streamer\n\n#importa biblioteca de deteccao objetos em movimento#\nfrom cvlib.object_detection import draw_bbox\n\n\nimport cv2\nimport cvlib as cv\n\n# http://insecam.org/en/view/912189/#\n\nport = 9092\nrequire_login = False\nstreamer = Streamer(port, require_login)\n\nendpoint_cam = \"http://189.131.16.109:84/mjpg/video.mjpg\" # ok \n\ndef main():\n video_capture = cv2.VideoCapture(endpoint_cam)\n\n while True:\n _, frame = video_capture.read()\n bbox, label, conf = cv.detect_common_objects(frame, confidence=0.50, model='yolov4')\n print(bbox,label)\n for tag in label:\n if tag == \"person\":\n frame = draw_bbox(frame, bbox, label, conf)\n \n streamer.update_frame(frame)\n if not streamer.is_streaming:\n streamer.start_streaming()\n\n cv2.waitKey(30) \n\nif __name__ == \"__main__\":\n main()\n"
}
] | 2 |
ranjananubhav7/Password-manager | https://github.com/ranjananubhav7/Password-manager | 793b88ef9c1ad611743bb23a0dc970035b9b8c29 | 855a3c1bd6274c99beb2fe3536c1e3c110153bab | 157680fe093f7231d0b64342395c2398cccddeb3 | refs/heads/main | 2023-08-20T11:06:50.900369 | 2021-10-08T15:11:28 | 2021-10-08T15:11:28 | 363,719,219 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5198432207107544,
"alphanum_fraction": 0.5443410277366638,
"avg_line_length": 31.396825790405273,
"blob_id": "2b2f29defeee90bb5abb4102455f4d9106e8ed6e",
"content_id": "b602762e95825b22ce3e33e09a82de8807a10252",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4082,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 126,
"path": "/main.py",
"repo_name": "ranjananubhav7/Password-manager",
"src_encoding": "UTF-8",
"text": "from tkinter import *\nimport random\nimport json\nfrom tkinter import messagebox\n\nwindow = Tk()\n# ---------------------------- PASSWORD GENERATOR ------------------------------- #\n\n\ndef passwordGenerator():\n letters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u',\n 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',\n 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']\n numbers = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']\n symbols = ['!', '#', '$', '%', '&', '(', ')', '*', '+']\n\n nr_letters = random.randint(8, 10)\n nr_symbols = random.randint(2, 4)\n nr_numbers = random.randint(2, 4)\n\n password_list = []\n\n for char in range(nr_letters):\n password_list.append(random.choice(letters))\n\n for char in range(nr_symbols):\n password_list += random.choice(symbols)\n\n for char in range(nr_numbers):\n password_list += random.choice(numbers)\n\n random.shuffle(password_list)\n password = \"\"\n for char in password_list:\n password += char\n entry3.delete(0, 'end')\n entry3.insert(0, password)\n window.clipboard_clear()\n window.clipboard_append(password)\n\n\n# ---------------------------- SAVE PASSWORD ------------------------------- #\n\n\ndef savePassword():\n website = entry1.get()\n email = entry2.get()\n password = entry3.get()\n new_data = {\n website: {\n \"email\": email,\n \"password\": password,\n }\n }\n\n if len(website) == 0 or len(password) == 0:\n messagebox.showinfo(title=\"Oops\", message=\"Please make sure you haven't left any fields empty.\")\n else:\n try:\n with open(\"data.json\", \"r\") as data_file:\n # Reading old data\n data = json.load(data_file)\n except FileNotFoundError:\n with open(\"data.json\", \"w\") as data_file:\n json.dump(new_data, data_file, indent=4)\n else:\n # Updating old data with new data\n data.update(new_data)\n\n with open(\"data.json\", \"w\") as data_file:\n # Saving updated 
data\n json.dump(data, data_file, indent=4)\n finally:\n entry1.delete(0, END)\n entry3.delete(0, END)\n entry1.focus()\n\n# ---------------------------- FIND WEBSITE ------------------------------- #\n\n\ndef find_website():\n website = entry1.get()\n try:\n with open(\"data.json\") as data_file:\n data = json.load(data_file)\n except FileNotFoundError:\n messagebox.showinfo(title=\"Error\", message=\"No Data File Found.\")\n else:\n if website in data:\n email = data[website][\"email\"]\n password = data[website][\"password\"]\n messagebox.showinfo(title=website, message=f\"Email: {email}\\nPassword: {password}\")\n else:\n messagebox.showinfo(title=\"Error\", message=f\"No details for {website} exists.\")\n\n# ---------------------------- UI SETUP ------------------------------- #\n\n\nwindow.title(\"Password manager\")\nwindow.config(padx=20, pady=20)\ncanvas = Canvas(width=200, height=200, highlightthickness=0)\nimage = PhotoImage(file=\"logo.png\")\ncanvas.create_image(100, 100, image=image)\nlabel1 = Label(text=\"Website:\")\nlabel2 = Label(text=\"Email/Username:\")\nlabel3 = Label(text=\"Password:\")\nbutton1 = Button(text=\"Generate Password\", command=passwordGenerator)\nbutton2 = Button(text=\"Add\", width=36, command=savePassword)\nfind_website_button = Button(text=\"Find\", command=find_website)\nentry1 = Entry(width=35)\nentry1.focus()\nentry2 = Entry(width=35)\nentry2.insert(0, \"[email protected]\")\nentry3 = Entry(width=21)\nlabel1.grid(row=1, column=0)\nlabel2.grid(row=2, column=0)\nlabel3.grid(row=3, column=0)\nentry1.grid(row=1, column=1)\nfind_website_button.grid(row=1, column=2)\nentry2.grid(row=2, column=1, columnspan=2)\nentry3.grid(row=3, column=1)\nbutton1.grid(row=3, column=2)\nbutton2.grid(row=4, column=1, columnspan=2)\ncanvas.grid(row=0, column=1)\n\nwindow.mainloop()\n"
},
{
"alpha_fraction": 0.7114093899726868,
"alphanum_fraction": 0.7224352955818176,
"avg_line_length": 33.16393280029297,
"blob_id": "7dadc08ae858b2b4a0da20d703d0069e56be1cac",
"content_id": "75330a755feb3e8221fb4d88e56ab8dd7296bf4a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4172,
"license_type": "no_license",
"max_line_length": 176,
"num_lines": 122,
"path": "/README.md",
"repo_name": "ranjananubhav7/Password-manager",
"src_encoding": "UTF-8",
"text": "\n\n\n\n<!-- PROJECT LOGO -->\n<br />\n<p align=\"center\">\n\n <h3 align=\"center\">PASSWORD MANAGER</h3>\n\n <p align=\"center\">\n A python based application for creating and managing secure passwords. \n <br />\n <br />\n </p>\n</p>\n\n\n\n<!-- TABLE OF CONTENTS -->\n<details open=\"open\">\n <summary>Table of Contents</summary>\n <ol>\n <li>\n <a href=\"#about-the-project\">About The Project</a>\n <ul>\n <li><a href=\"#built-with\">Built With</a></li>\n </ul>\n </li>\n <li>\n <a href=\"#getting-started\">Getting Started</a>\n <ul>\n <li><a href=\"#prerequisites\">Prerequisites</a></li>\n <li><a href=\"#installation\">Installation</a></li>\n </ul>\n </li>\n <li><a href=\"#contributing\">Contributing</a></li>\n <li><a href=\"#contact\">Contact</a></li>\n </ol>\n</details>\n\n\n\n<!-- ABOUT THE PROJECT -->\n## About The Project\n\nThis is a python based application to create and manage secure password for any field. The application using has a simple UI but it is intended to have a more user friendly UI.\n\nHere's why:\n* This project will help you to improve your basic understanding of python development, tkinter, json and creating logic for applications. \n* It will also help you learn how to build an effective UI/UX for your own projects.\n* The tech stack used is very beginner friendly.\n\n\nA list of commonly used resources that I find helpful are listed in the acknowledgements.\n\n### Built With\n\nThe tech stacks used in the projects are :\n* [TKINTER](https://docs.python.org/3/library/tkinter.html#:~:text=The%20tkinter%20package%20(%E2%80%9CTk%20interface,well%20as%20on%20Windows%20systems.)\n* [Json](https://docs.python.org/3/library/json.html)\n\n\n\n<!-- GETTING STARTED -->\n## Getting Started\n\nYou can use the application in your local system by following the steps mentioned below :\n\n### Prerequisites\n\nThis is an example of how to list things you need to use the software and how to install them.\n\n### Installation\n\n1. 
Open terminal in your project's folder\n2. Clone the repo\n ```sh\n git clone https://github.com/ranjananubhav7/Password-manager.git\n ```\n3. Open pycharm or any text editor \n4. Run the program in the text editor environment\n\n\n\n<!-- CONTRIBUTING -->\n## Contributing\n\nContributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.\n\n1. Fork the Project\n2. Create your Feature Branch (`git checkout -b feature/AmazingFeature`)\n3. Commit your Changes (`git commit -m 'Add some AmazingFeature'`)\n4. Push to the Branch (`git push origin feature/AmazingFeature`)\n5. Open a Pull Request\n\n\n\n<!-- CONTACT -->\n## Contact\n\nYour Name - [Anubhav Ranjan](https://www.linkedin.com/in/anubhav-ranjan-223b73129/) - [email protected]\n\nProject Link: [https://github.com/your_username/repo_name](https://github.com/ranjananubhav7/Weather-Report)\n\n\n\n\n\n\n\n<!-- MARKDOWN LINKS & IMAGES -->\n<!-- https://www.markdownguide.org/basic-syntax/#reference-style-links -->\n[contributors-shield]: https://img.shields.io/github/contributors/othneildrew/Best-README-Template.svg?style=for-the-badge\n[contributors-url]: https://github.com/othneildrew/Best-README-Template/graphs/contributors\n[forks-shield]: https://img.shields.io/github/forks/othneildrew/Best-README-Template.svg?style=for-the-badge\n[forks-url]: https://github.com/othneildrew/Best-README-Template/network/members\n[stars-shield]: https://img.shields.io/github/stars/othneildrew/Best-README-Template.svg?style=for-the-badge\n[stars-url]: https://github.com/othneildrew/Best-README-Template/stargazers\n[issues-shield]: https://img.shields.io/github/issues/othneildrew/Best-README-Template.svg?style=for-the-badge\n[issues-url]: https://github.com/othneildrew/Best-README-Template/issues\n[license-shield]: https://img.shields.io/github/license/othneildrew/Best-README-Template.svg?style=for-the-badge\n[license-url]: 
https://github.com/othneildrew/Best-README-Template/blob/master/LICENSE.txt\n[linkedin-shield]: https://img.shields.io/badge/-LinkedIn-black.svg?style=for-the-badge&logo=linkedin&colorB=555\n[linkedin-url]: https://linkedin.com/in/othneildrew\n[product-screenshot]: images/screenshot.png\n"
}
] | 2 |
TeleginAnton/PythonPY100 | https://github.com/TeleginAnton/PythonPY100 | 71dc1252bed0f085d69b539ae217fd51fa7ceac7 | 910cd58fa2fe54fe90c072ea3f647e36a11becd1 | 67438c8cc15d7433dc0a88984bc0a437659534f9 | refs/heads/master | 2023-08-20T20:14:15.139166 | 2021-10-17T15:05:35 | 2021-10-17T15:05:35 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.71875,
"alphanum_fraction": 0.71875,
"avg_line_length": 31,
"blob_id": "9e0b7441fad825563a41ad719b4bb42abaa12a8f",
"content_id": "75df31fb67061e505f784eb2625baa19a5418b92",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 32,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 1,
"path": "/Занятие2/Практические_задания/task2_4/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "not True or (True and not True)\n"
},
{
"alpha_fraction": 0.4893617033958435,
"alphanum_fraction": 0.5148935914039612,
"avg_line_length": 22.600000381469727,
"blob_id": "712658152af76b9276e6923a4227b5fffba71f5f",
"content_id": "e9567d8ac98979ff9e427144fd3e5edd8607560b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 293,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 10,
"path": "/Занятие2/Лабораторные_задания/task1_3/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "# TODO\na = int(input('Число A: '))\nb = int(input('число b: '))\n\nif a ** 2 + b ** 2 > (a + b) ** 2:\n print('суммв квадратов больше')\nelif a ** 2 + b * 2 < (a + b) ** 2:\n print(\"Квадрат суммы больше\")\nelse:\n print('Суммы равны')"
},
{
"alpha_fraction": 0.6449275612831116,
"alphanum_fraction": 0.6594203114509583,
"avg_line_length": 16.125,
"blob_id": "852b6a27c8d4f83b5033f331c7dd316a0ac72c55",
"content_id": "c2aafa698c76b32997271517df79d37c287b78f9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 166,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 8,
"path": "/Занятие1/Лабораторные задания/task1_2/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "# Напишите ваше решение\nbet = 13\n\nsalary = int(input('указать ЗП: '))\n\ntax_p = salary / bet\nresult = salary - tax_p\nprint(tax_p, result)\n\n"
},
{
"alpha_fraction": 0.6390374302864075,
"alphanum_fraction": 0.644385039806366,
"avg_line_length": 36.400001525878906,
"blob_id": "8ceb5bd38f79e40516ccd8500fa07b722b989998",
"content_id": "ac8241a7d26788155d8905b7876e9a6c52ff8162",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 502,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 10,
"path": "/Занятие2/Лабораторные_задания/task2_4/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "if __name__ == \"__main__\":\nstr_=\n hi = len(list('Hello,world'))\n phrase = 'Hello,world'\n count_stairs = list(range(4, hi + 1))\n for index in count_stairs:\n print(' ' * index, phrase)\n # постарайтесь не использовать \"магические\" числа,\n # а по возможности дать переменным осмысленные названия и использовать их\n # TODO Распечатать строку лесенкой\n"
},
{
"alpha_fraction": 0.7804877758026123,
"alphanum_fraction": 0.7804877758026123,
"avg_line_length": 40,
"blob_id": "f74b2e2fffe3fe067ce1d4c33f4ee03f79cfcf0a",
"content_id": "8731a45c490161d2d96e40a895beac878137cc6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 41,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 1,
"path": "/Занятие2/Практические_задания/task2_5/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "not False and True or False and not True\n"
},
{
"alpha_fraction": 0.5192307829856873,
"alphanum_fraction": 0.5705128312110901,
"avg_line_length": 18.5,
"blob_id": "d259f17dd0f9ff21b0c62c920a202465008a77a9",
"content_id": "33d5e5b5012a5bbd947bc42b40045b8bb13333fa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 191,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 8,
"path": "/Занятие2/Лабораторные_задания/task1_1/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "# TODO\na = int(input('Число '))\n\nresult = a % 2 == 0 and a % 3 == 0\nif result:\n print('Число равное 2 или 3')\nelse:\n print('Число не равное 2 или 3')\n"
},
{
"alpha_fraction": 0.672897219657898,
"alphanum_fraction": 0.6915887594223022,
"avg_line_length": 20.600000381469727,
"blob_id": "c5768eaefbccf74f421f05a77575087505955783",
"content_id": "f340d3b311d01c86b9fd65cba4ad264046b055dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 141,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 5,
"path": "/Занятие1/Домашнее_задание/task2/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "# Напишите ваше решение\ntask = 10\n\ntime_task = int(input('Указать время мин.: '))\nresult = task / time_task"
},
{
"alpha_fraction": 0.5905511975288391,
"alphanum_fraction": 0.5905511975288391,
"avg_line_length": 24.600000381469727,
"blob_id": "cab01f2db46be2f8a9c887535e2287cc8d0e07d5",
"content_id": "10d60f9a7ebaf2cdb994494866a6f8e9239c6d6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 163,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 5,
"path": "/Занятие1/Домашнее_задание/task3/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "# Напишите ваше решение\na = int(input('Число а: '))\nb = int(input('Число b: '))\nc = int(input('Число с: '))\nprint(min(a, b, c))"
},
{
"alpha_fraction": 0.5454545617103577,
"alphanum_fraction": 0.5714285969734192,
"avg_line_length": 21.14285659790039,
"blob_id": "9afefeb63e5a93dee8ea6560fc600b28398d8e6d",
"content_id": "ffa6c0f6c11737a3dd894ca198b3ee6079352592",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 190,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 7,
"path": "/Занятие2/Лабораторные_задания/task1_2/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "# TODO\na = int(input('введите число a: '))\nb = int(input('введите число b: '))\n\nresult = a % 2 == 1 and b % 2 == 1\nif result:\n print('результат а и б')"
},
{
"alpha_fraction": 0.8039215803146362,
"alphanum_fraction": 0.8039215803146362,
"avg_line_length": 75.5,
"blob_id": "efab60a302594f8bcc24db5567f396c3c0be4330",
"content_id": "d562c87010bd5f06d3f24db0945fb4596df3a739",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 208,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 2,
"path": "/Занятие1/Практические_задания/task1_1/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "my_variable = 'Hello World' # TODO строковая переменная Hello World\nprint(my_variable)# TODO с помощью функции print распечатать переменную my_variable\n"
},
{
"alpha_fraction": 0.5355648398399353,
"alphanum_fraction": 0.6234309673309326,
"avg_line_length": 25.66666603088379,
"blob_id": "b6c0177a253da1b0ddbbfb7076ecf6cd73d889e7",
"content_id": "0363388e0948af2fe7cc14e0127b323889264326",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 303,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 9,
"path": "/Занятие1/Домашнее_задание/task1/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "# Напишите ваше решение\n\nspeed = int(input('Скорость передачи данных: '))\ncoast = int(input('Стоимомть: '))\ntime = int(input('Врмя скачивания '))\na = speed * 1024\nb = (time / a) * 1024 * 1024 * 1024\nc = (b - (1024 * 3)) * coast\nprint(b, c)"
},
{
"alpha_fraction": 0.7127659320831299,
"alphanum_fraction": 0.7234042286872864,
"avg_line_length": 39.28571319580078,
"blob_id": "35c8be07cf221f4d0a508841ba746ad0691e9c9a",
"content_id": "8b21df2935dd484b05a6ad49d8b50ffd634a78e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 386,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 7,
"path": "/Занятие1/Лабораторные задания/task1_1/main.py",
"repo_name": "TeleginAnton/PythonPY100",
"src_encoding": "UTF-8",
"text": "DAYS_OF_YEAR = 365 # количество дней в году\n\nstart_year = int(input('Год родения: ')) # TODO запросить у пользователя год рождения\ncurrent_year = int(input('Текущий год:')) # TODO запросить у пользователя текущий год\n\ndyas = (start_year - current_year) * DAYS_OF_YEAR\nprint(dyas)\n"
}
] | 12 |
cms-sw/ib-scheduler | https://github.com/cms-sw/ib-scheduler | a05738a63dc64a57f27dec379c68458291d50149 | 5de7de4d87f15e99e82c4654996a52c6c8a9c635 | 7a8c5e44c5d4a8161d8b2399ef65d7b4dbbb233e | refs/heads/master | 2021-06-01T13:50:53.156779 | 2018-01-31T15:45:40 | 2018-01-31T15:45:40 | 9,659,398 | 0 | 0 | null | 2013-04-24T23:01:57 | 2013-10-22T08:53:48 | 2013-10-22T08:53:48 | Python | [
{
"alpha_fraction": 0.6939163208007812,
"alphanum_fraction": 0.6939163208007812,
"avg_line_length": 36.57143020629883,
"blob_id": "69aa359f8c94ab44126f65f8c72439fbec07cc1f",
"content_id": "dd28bf6ef9b29e21986d09c31751a256823d756b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 526,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 14,
"path": "/buildRequestAPI.py",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport ws_sso_content_reader\nDEFAULT_TC_URL = \"https://eulisse.web.cern.ch/eulisse/cgi-bin/git-collector/buildrequests\"\n\ndef setTCBaseURL(url):\n DEFAULT_TC_URL = url\n\ndef call(method, obj, **kwds):\n if method == \"GET\":\n opts = urlencode(kwds)\n return loads(ws_sso_content_reader.getContent(join(tcBaseURL, obj) + \"?\" + opts, None, method))\n elif method in [\"POST\", \"PATCH\", \"DELETE\"]:\n opts = dumps(kwds)\n return loads(ws_sso_content_reader.getContent(join(tcBaseURL, obj), opts, method))\n"
},
{
"alpha_fraction": 0.7225433588027954,
"alphanum_fraction": 0.7225433588027954,
"avg_line_length": 23.714284896850586,
"blob_id": "ab49e146acc58afda5b138841a274b50e7aceb39",
"content_id": "0409b52dce6f20d3da02619c18ae4528f01dc4d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 173,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 7,
"path": "/stressTestGit.sh",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "#!/bin/sh -ex\n\nWORKDIR=/build/cmsbuild/git-stress-test\nrm -rf $WORKDIR\nmkdir -p $WORKDIR\ncd $WORKDIR\ntime git clone --bare --mirror https://:@git.cern.ch/kerberos/CMSSW.git\n"
},
{
"alpha_fraction": 0.7071269750595093,
"alphanum_fraction": 0.7672605514526367,
"avg_line_length": 98.77777862548828,
"blob_id": "7e0cdebfa4e53aaec5ea3459fd23600cbae981e6",
"content_id": "88403027086880cb80870a51b1c7e057e52b49cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 898,
"license_type": "no_license",
"max_line_length": 141,
"num_lines": 9,
"path": "/mirrorGit.sh",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\nMIRROR=/afs/cern.ch/cms/git-cmssw-mirror\nCERN_REPO=https://:@git.cern.ch/kerberos\ncd $MIRROR/cmssw.git ; git config http.postBuffer 209715200 ; git remote update origin ; git push --mirror $CERN_REPO/CMSSW.git\ncd $MIRROR/cmsdist.git ; git config http.postBuffer 209715200 ; git remote update origin ; git push --mirror $CERN_REPO/CMSDIST.git\ncd $MIRROR/pkgtools.git ; git config http.postBuffer 209715200 ; git remote update origin ; git push --mirror $CERN_REPO/PKGTOOLS.git\ncd $MIRROR/cmssw-config.git ; git config http.postBuffer 209715200 ; git remote update origin ; git push --mirror $CERN_REPO/CMSSW/config.git\ncd $MIRROR/SCRAM.git ; git config http.postBuffer 209715200 ; git remote update origin ; git push --mirror $CERN_REPO/SCRAM.git\ncd $MIRROR/ib-scheduler.git ; git config http.postBuffer 209715200 ; git remote update origin ; git push --mirror $CERN_REPO/ib-scheduler.git\n"
},
{
"alpha_fraction": 0.5161290168762207,
"alphanum_fraction": 0.5210918188095093,
"avg_line_length": 25.799999237060547,
"blob_id": "e96ea13353e761dedcf061037a79050f299ff5b9",
"content_id": "1125dc0abf52ed47d2e3df70706b940389911ab7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 403,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 15,
"path": "/setup.py",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nfrom distutils.core import setup\nsetup(name='IB Scheduler',\n version='1.0',\n description='CMS IB Utilities',\n author='CMS Collaboration',\n author_email='hn-cms-sw-develtools@@cern.ch',\n url='http://cmssdt.cern.ch',\n py_modules=[\"tagCollectorAPI\",\n \"ws_sso_content_reader\",\n \"all_json\",\n \"Lock\"],\n scripts=['autoIB.py']\n )\n\n"
},
{
"alpha_fraction": 0.7210460901260376,
"alphanum_fraction": 0.7210460901260376,
"avg_line_length": 28.740739822387695,
"blob_id": "5a76d16965bb7b852a0ea22f66a4438fb80986c5",
"content_id": "5acce40e67cc282d1e0769022db5cc0c9ecf8923",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 803,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 27,
"path": "/README.md",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "## What is this\n\nA bunch of script which CMS uses for building integration builds. Scripts\ncontributed by a bunch of different people.\n\n## Deployment\n\nClone the repository and get the secrets:\n\n git clone <repository>\n cd ib-scheduler\n scp -l $CMS_ADMIN_USER lxplus.cern.ch:~/private/cmssw_secrets.py .\n\nwhere `CMS_ADMIN_USER` has to be asked to the relevant persons in CMS. You will\nneed a valid certificate for the CERN SSO login.\n\nTo create a build request do:\n\n ./autoIB.py request -a <architecture> -k -r '<release-tag>' --sync-back --repository \"cms.week@TW\" <package>\n\nto build the same build request:\n\n ./autoIB.py process -j <jobs> --match-arch \"<architecture-reg-exp>\" --top-dir <workdir> --max-load <max-load-average>\n\n## Development\n\nOpen a pull request for each new feature.\n"
},
{
"alpha_fraction": 0.5690959692001343,
"alphanum_fraction": 0.5913978219032288,
"avg_line_length": 35.39130401611328,
"blob_id": "d527f8dc8d37a24aea010024fdaf40c570487bc9",
"content_id": "7391b3e7c6e4712e78ef9bd614ccd68fe57ca19c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2511,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 69,
"path": "/autoCreateIb.py",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# A simple script which creates IBs in git.\nfrom commands import getstatusoutput\nfrom optparse import OptionParser\nfrom datetime import datetime, timedelta\nfrom time import strftime\nimport re\n\ndef expandDates(s):\n today = datetime.today()\n tw=str(int(today.strftime(\"%W\")) % 2)\n nw=str(int((today + timedelta(days=7)).strftime(\"%W\")) % 2)\n pw=str(int((today + timedelta(days=-7)).strftime(\"%W\")) % 2)\n return strftime(s.replace(\"@TW\", tw).replace(\"@NW\", nw).replace(\"@PW\", pw))\n\ndef format(s, **kwds):\n return s % kwds\n\ndef tagRelease(tag, branch, timestamp):\n (day, t) = timestamp.rsplit(\"-\", 1)\n hour = t[0:2] + \":\" + t[2:4]\n \n cmd = format(\"set -e;\"\n \"TEMP=`mktemp -d`;\"\n \"if [ -d /afs/cern.ch/cms/slc5_amd64_gcc472/external/git/1.8.3.1/etc/profile.d/init.sh ]; then\"\n \" source /afs/cern.ch/cms/slc5_amd64_gcc472/external/git/1.8.3.1/etc/profile.d/init.sh;\"\n \"fi;\"\n \"git clone $REFERENCE -b %(branch)s [email protected]:cms-sw/cmssw.git $TEMP/cmssw;\"\n \"cd $TEMP/cmssw;\"\n \"git tag %(tag)s `git rev-list -n 1 --before='%(day)s %(hour)s' %(branch)s`;\"\n \"git push origin --tags;\"\n \"rm -rf $TEMP\",\n day=day,\n hour=hour,\n branch=branch,\n tag=tag)\n err, out = getstatusoutput(cmd)\n if err:\n print \"Error while executing command:\"\n print cmd \n print out\n \n\nif __name__ == \"__main__\":\n parser = OptionParser()\n parser.add_option(\"-b\", \"--base\", help=\"The release branch to use for this.\", default=None, dest=\"base\")\n parser.add_option(\"-D\", \"--date\", help=\"Use this timestamp for the tag.\", default=None, dest=\"timestamp\")\n opts, args = parser.parse_args()\n if len(args) == 0:\n parser.error(\"You need to specify a tag\")\n if len(args) > 1:\n parser.error(\"Too many tags\")\n\n release = expandDates(args[0])\n if not opts.base:\n m = re.match(\"(CMSSW_[0-9]+_[0-9]+).*\", release)\n if not m:\n parser.error(\"Could not determine the release branch, please provide 
one with -b, --base\")\n opts.base = m.group(1) + \"_X\"\n\n if opts.timestamp:\n opts.timestamp = expandDates(opts.timestamp)\n else:\n m = re.match(\"CMSSW_[0-9]+_[0-9]+_.*?([0-9]{4}-[0-9]{2}-[0-9]{2}-[0-9]{4})$\", release)\n if not m:\n parser.error(\"Could not determine date from release name. Please specify it via -D\")\n opts.timestamp = m.group(1)\n \n tagRelease(release, opts.base, opts.timestamp)\n"
},
{
"alpha_fraction": 0.7410714030265808,
"alphanum_fraction": 0.7410714030265808,
"avg_line_length": 27,
"blob_id": "eb44204e4e48450b6b6a48765ada85a57038942c",
"content_id": "430913d82950787bcf51ab3a59e5fbd2b3ba581b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 336,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 12,
"path": "/all_json.py",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "# Apparently there are many ways to import json, depending on the python\n# version. This should make sure you get one.\ntry:\n from json import loads\n from json import dumps\nexcept:\n try:\n from json import read as loads\n from json import write as dumps\n except:\n from simplejson import loads\n from simplejson import dumps\n"
},
{
"alpha_fraction": 0.7670713663101196,
"alphanum_fraction": 0.7724673748016357,
"avg_line_length": 47.79623794555664,
"blob_id": "4b52ddb6491ee23bd5e9af0f0c08df3676f43889",
"content_id": "2a12c6616cc0d86641b6b0ea650ef50bd9b2a9f3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 15567,
"license_type": "no_license",
"max_line_length": 258,
"num_lines": 319,
"path": "/DESIGN.md",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "# A new build infrastructure\n\n## Design goals and features description\n\nThis is a specification on how the new IB machinery is supposed to work.\n\nThe goal of this design is to provide a simple yet flexible and pluggable way\nof doing build tasks, which are specified in form of a `spec` file present in\n`CMSDIST` plus some additional attributes that specify a few configurable\nparameters (See the section Build Task Specification). `cmsBuild` is used to do\nthe actual building, as usual.\n\nThe apt repository, <http://cmsrep.cern.ch/cmssw/>, is used to host result of\nall build tasks, sometimes in form of RPMS, some other times in form of misc\nfiles under the `WEB` subdirectory of the repository. This most notably allows\ninstallable RPM distributions for Integration Builds (*IBs*).\n\nIn order to prevent bloating of the production `cms` repository, a\ndouble-buffering-like technique, is used where two temporary repositories (e.g.\n`cms.week0`, `cms.week1`) are alternatively reset on bi-weekly basis while the\nsystem populates the other. This effectively provides a two weeks rolling\nwindow which contains the results of build. This is particularly suitable for\nautomating all the tasks related to IBs as the time limited availability of\nthose is generally considered a more than acceptable tradeoff. The size of the\nrolling window is just limited by the amount of space available in the apt\nrepository (2TB at the moment). Normal release builds will use the usual\ncentral `cms` repository.\n\nThe actual build work is carried out by a pool of undifferentiated machines.\nThe machines themselves only need to be a viable CMSSW build machines but apart\nfrom that no machine is actually bound to a given task. \n\nTasks queue in the task queue are all unrelated and can be executed in\nparallel. 
If tasks require some dependency this is implemented by the mean of\ncontinuations (see below).\n\n## Implementation notes\n\nThe design heavily relies on the fact that parallel uploads to the apt\nrepository can be done in a safe, transactional way and which can be\nrolled-back at any moment. The new upload mechanism, in place since fall last\nyear, guarantees this.\n\nThe tag collector is used to keep track of all the requested build tasks and\neffectively act as a task queue. Those request are marked of type `TASK-<hash>`\ninside the database and they can be observed in the usual [custom\nIB](https://cmstags.cern.ch/tc/#CustomIBs) page. The `<hash>` part is a unique\nidentifier for a class of build tasks, for example building a cmssw release. At\nthe moment this is merely the name of the package being built. \n\nA new script\n[`CmsIntBld/IntBuild/script/autoIB.py`](http://svnweb.cern.ch/world/wsvn/CMSIntBld/trunk/IntBuild/scripts/autoIB.py)\nhas been introduced which can be used to control the whole process of queueing\ntasks in the tag collector task queue as well as listing and processing them.\n\nThe script acts as a driver for different sub-commands, ala cvs or git, which\nthen do the actual job. The list of commands currently supported is:\n\n- `request [options] -r <release-name> -a <architecure> <package>`: enqueue a request to build `<package>` associated to a given `<release-name>` and `<architecture>` pair. 
The rest of the options allow to better specify what the payload looks like and how \n- `process`: pick a task from the tag queue.\n- `list`: list the pending tasks\n- `cancel <task_id>`: cancel the given `<task_id>`\n\nThis consolidates most of the scripts which were previously being used under a\nsingle script and works for both cases of publishing a build request in tag\ncollector, processing requests on client nodes as well as performing some\nmaintenance tasks.\n\nParticular attention is given to make sure that most of the options match their\n`cmsBuild` counterpart when they are passed on to it.\n\n### Queuing a new task\n\nQueueing a new task is done by the `autoIB.py request` command.\n\nIn general tasks can also be queued manually, however the most common usage is\ndone by `cmsbuild` user's `acrontab` to schedule the IB jobs.\n\nQueueing by hand is nevertheless interesting for the case in which one wants to\ndo a one time build and upload action, e.g. a release, or some new external to\nbe tested before integration in the normal build workflow. \n\n\n#### Basic usage\n\nAll tasks refer to building and uploading some spec file in `CMSDIST`, so that\nadding new tasks is simply a matter of implementing the appropriate spec file.\n\nBesides the name of the spec file to build and upload, one can specify the\nsource repository (e.g. `--repository cms`), the destination repository (e.g.\n`--upload-tmp-repository weekly1` for `cms.weekly1`) and whether or not the\nsystem needs to sync back (via the usual `--sync-back` flag which is there in\n`cmsBuild`). This allows us to control whether or not an upload needs to end up\nin the official repository or in some test one. E.g:\n\n autoIB.py request -a slc5_amd64_gcc472 \\\n -r 'CMSSW_6_2_X_%Y-%m-%d-1400' \\\n --repository cms \\\n --upload-tmp-repository week1 \\\n cmssw\n\nwill queue building of a CMSSW IB and upload it in a temporary repository\ncalled `cms.week1`. 
Notice that the release name will always be substituted in\nyour `cmssw.spec`, regardless of what package you are building.\n\nA special package called `dummy` which can be used to always build and upload\nsomething. This can be used in the case one needs to create a temporary\nrepository to be populated later.\n\nMoreover all the options are passed through `strftime` and a filter which\nreplaces the special strings `@TW` and `@NW` to refer to the week number in the\nyear, current week and next week respectively, modulo 2. This is useful to\nalternate repositories every other week.\n\nA few other build options which are commonly available in cmsBuild are\navailable:\n\n- `--ignore-compilation-error` or `-k` which can be handy to create IB packages.\n\n#### Specifying continuations\n\nIt is sometimes desirable to schedule tasks only after the successful\ncompletion of another, possibly unrelated, task.\n\nThis is supported by `autoIB.py` via the `--continuations` option.\n\nSuch an option takes a comma separated list of pairs `<package>:<architecture>`\nto be scheduled after the completion of the task they are attached to. All the\nother options in the task will stay the same and the tasks specified will be\nexecuted in parallel.\n\nIn case there is need for continuations of continuations they can be specified\nby adding one or more additional list, separating them via a semi-colon. E.g.:\n\n cmssw:slc5_amd64_gcc472,cmssw:slc6_amd64_gcc472;\\\n cmssw-validation:slc5_amd64_gcc472,cmssw-extra-tests:slc6_amd64_gcc472,\\\n dummy:slc7_amd64_gcc500\n \nin this case the additional continuations will be scheduled added only to the\ntask which have a matching architecture. 
In the above case this means:\n\n- building `cmssw` for both `slc5_amd64_gcc472` and `slc6_amd64_gcc472` will be\n scheduled once the main task completes.\n- `cmssw-validation` will be run as a continuation of the `slc5` `cmssw`, while\n `cmssw-extra-tests` will be scheduled when the `slc6` build completes.\n `dummy` will not be scheduled because there is no initial continuation which\n uses its architecture.\n\nBy exploiting the usage of the `%cmsroot/WEB/` area provided by the new upload\nmechanism of `cmsBuild`, rpm building can be used to publish result of complex\nprocessing tasks on the web. For example those coming from running release\nvalidation steps and so on. This will hopefully consolidate all the different\n\"post-build\" steps which we currently have into one single kind of workflow.\n\n#### Overriding contents of CMSDIST / PKGTOOLS\n\nSometimes it's useful for testing purposes to be able to override `CMSDIST` and\n/ or `PKGTOOLS` tags or even single files into `CMSDIST`.\n\nOverriding tags can be done by using the `--cmsdist TAG` and `--pkgtools TAG`\noptions. By default both tags are assumed to be in the official `cms-sw` git\nrepositories, this can be changed by using the github syntax for private repositories, e.g.:\n\n --cmsdist <user>:<branch-or-tag>\n\nIt is also possible to specify a few special keywords which get substituted before\nsubmitting the requets. These are:\n\n* `@ARCH`: the architecture, as specified by the `-a` option.\n* `@RELEASE`: the release name, as specified by the `-r` option.\n* `@QUEUE`: the queue associated to the release, i.e. the `CMSSW_x_y_X`.\n\n### Processing tasks\n\nThe tasks queued by the `request` sub-command described above can be processed\nusing the `process` sub-command. 
Such a command takes two options\n`--match-arch` and `--match-release` which can be used to restrict the kind of\ntasks that it will process.\n\nAn additional option `--top-dir <top-dir>` can be used to specify in which\ndirectory the processing will happen. There a directory which has the same name\nas the request id in tag collector will contain a checkout of the requested\n`PKGTOOLS` / `CMSDIST` instances.\n\n`PKGTOOLS` will use `<top-dir>/b` as its own workdir, while the log file of the\nprocess will be found in `<top-dir>/log.<request_id>`. The autoIB command will\nalso take care of synchronizing those log files and those coming from the build\nitself into to `http://cmssdt.cern.ch/SDT/tc-ib-logs/<machine-name>`. The tasks\nin tag collector will be updated so that they will point to their own log.\n\nA few other build options which are commonly present in `cmsBuild` are also\navailable:\n\n- `--builders` which can be used to specify how many packages build in parallel\n in one single task.\n- `--jobs` or `-j` which specifies how many threads to use to build a single\n package\n\n#### Opportunistic resource usage\n\nSome machine are available for users to so some occasional heavy duty tasks,\nhowever most of the machines are idle for large part of the time.\n\n`autoIB.py` provides means to use those machines in an opportunistic way via\nthe `--max-load <load>` option which can be used to avoid processing a task\nwhile the machine is being used by someone else and its loads exceeds the\n`<load>` specified.\n\nFuture extensions of the system might include time based limitations of on the\nkind of task being executed, i.e. 
more powerful machines will do more time\nconsuming jobs.\n\nIn order to avoid that critical tasks are executed on machines which are for\nopportunistic usage, or simply to key a given task to a certain sub-set of\nmachines, it is possible to specify the `--hostname-filter <REGEX>` option,\nwhich will make sure that the hostname of the builder matches `REGEX`.\n\n#### Post-processing logs\n\nTwo kind of build logs are available when building a package. The first one is\nthe log of `cmsBuild` process itself and then there is the `rpmbuild` build\nlogs for each one of the packages being built. The former is stored in\n`<top-dir>/log.<request-id>` while those of the latter kind are found in the\nusual `<top-dir>/b/BUILD/<architecture>/<group>/<package>/<version>/log`\nlocation for a given package.\n\nEvery time `autoIB.py process` is invoked, those logs are synchronized to a web\ndirectory, so that one can follow the progress of a build from the web. Links\nto the logs are updated in tag collector as soon as they are available and\npresented in the [task report view](https://cmstags.cern.ch/tc/#CustomIBs). \n\nThis is done by a `syncLogs.py` script which is invoked at the beginning of\nevery `autoIB.py` process, regardless or not if there is another process\nalready running. This allows having an almost live updating of logs.\n\nThe same helper script takes care of improving the logs so that they are more\nbrowser friendly. Additional decorations to the logs can be put there but keep\nin mind that such a decoration is run every single time `autoIB.py process` is\nrun, so it needs to be as quick as possible to avoid interfering with the\nnormal processing of tasks. However, given the design decision of processing\nall the tasks using `cmsBuild`, a better approach to this would be to have an\nHTML mode for its logs so that they can be immediately used.\n\n### Deployment\n\n#### Deploying server\n\nThere is no server component specific to the new IB infrastructure. 
The new\ndesign depends on Tag Collector and the apt server, but those are supposed to\nbe already existing and working regardless of how we build IBs.\n\n#### Deploying clients\n\nDeploying clients is as easy as checking out the `CMSIntBld` from svn and\nadding the appropriate line in the `crontab` / `acrontab`. The user performing\nthe build should have `cmsbuild` password-less certificate and key in\n`~/.globus` and its `id_dsa` in `~/.ssh` so that it can get payloads from\n`cmstags.cern.ch` in an authenticated manner and upload results to\n`cmsrep.cern.ch`. For the same reason the firewall of the build machine should\nallow reaching the two above mentioned servers.\n\n### Security issues.\n\nSince the service allows building RPMs remotely and uploading them to a web\nserver, security must be a major concern.\n\nAll the user provided options are considered unsafe by the `autoIB.py process`\nwhich provides to sanitize all of them as they come from the payload.\n\nMoreover due to the fact that tasks are specified as `spec` files in `CMSDIST`\nmalicious payloads have to be committed there first, which is only possible for\na restricted number of cms users.\n\nThe build jobs themselves, run as the unprivileged `cmsbuild` user.\n\nFinally only CMS SSO authenticated users can submit build jobs. \n\n## A working testbed.\n\nA working testbed of the IB infrastructure is currently being put in place. It\nuses the production tag collector, the [cmssdt website](http://cmssdt.cern.ch)\nand two temporary repositories `cms.week1` and `cms.week0` to run a number of\nIBs and related tasks. In particular:\n\n- The two repositories contain bi-daily RPMs for the the `CMSSW_6_2_X` IB, on\n `slc5_amd64_gcc472`.\n- A short matrix is run at the completion of the builds. Its results are\n published here: <http://cmsrep.cern.ch/cmssw/cms.week1/WEB/ib-results/>\n- For linux machines, `acrontab` is currently used to schedule builds. 
Current\n testbed description can be found\n [here](http://svnweb.cern.ch/world/wsvn/CMSIntBld/trunk/deploy/cms-ib-sched/acrontab).\n New infrastructure is defined at the end.\n- Examples on how to use a `CMSSW` build to run some tests and publish results\n can be found\n [here](http://cmssw.cvs.cern.ch/cgi-bin/cmssw.cgi/COMP/CMSDIST/cmssw-validation.spec?revision=1.17&view=markup).\n\n### Installing releases on AFS\n\nInstalling releases on `AFS` is done via the\n`CmsIntBld/scripts/autoInstaller.sh` script. Such a script takes care of\ninstalling releases in `/afs/cern.ch/cms/sw/ReleaseCandidates/vol{0,1}` from\nthe `cms.week{0,1}` repositories. The script takes care of cleaning and\nbootstrapping the area every week, alternating: one week `vol0` and the other\n`vol1`.\n\n## Known issues with the current specification and its implementation\n\n- Multiple continuations are not implemented yet. Only one level of\n continuations works correctly.\n- Currently the build area is scratched every time. While this is not a big\n deal, it could result in high-loads on `cmsrep.cern.ch` when N IBs start at\n the same time. We should get a list of all the package which were built,\n delete them and try to install them from server.\n- Find a better name for `autoIB.py`\n- Describe the naming of tasks in tag collector.\n- Add paragraph on stats about the system, SLOC, etc.\n- Add paragraph about future work.\n- Restrict ability to submit build jobs to only certain privileged users? Allow\n `autoIb.py` to only process requests coming from certain users? \n"
},
{
"alpha_fraction": 0.6844097971916199,
"alphanum_fraction": 0.6899777054786682,
"avg_line_length": 42.59223175048828,
"blob_id": "2da98b2bf9d1ac7e41d0f8102d8be762ab92254c",
"content_id": "6050ad9a0eefa7d4883f203ffb8a91ee93b11304",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4490,
"license_type": "no_license",
"max_line_length": 157,
"num_lines": 103,
"path": "/ws_sso_content_reader.py",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n###Description: The tool reads cern web services behind SSO using user certificates\nimport os, urllib, urllib2, httplib, cookielib, sys, HTMLParser, re\nfrom optparse import OptionParser\nfrom os.path import expanduser, dirname, realpath\nfrom logging import debug, error, warning, DEBUG\nimport logging\n\nDEFAULT_CERT_PATH=\"~/.globus/usercert.pem\"\nDEFAULT_KEY_PATH=\"~/.globus/userkey.pem\"\n\ndef setDefaultCertificate(cert, key):\n DEFAULT_CERT_PATH=cert\n DEFAULT_KEY_PATH=key\n\nclass HTTPSClientAuthHandler(urllib2.HTTPSHandler): \n def __init__(self): \n urllib2.HTTPSHandler.__init__(self) \n self.key = realpath(expanduser(DEFAULT_KEY_PATH))\n self.cert = realpath(expanduser(DEFAULT_CERT_PATH))\n\n def https_open(self, req): \n return self.do_open(self.getConnection, req) \n\n def getConnection(self, host, timeout=300): \n return httplib.HTTPSConnection(host, key_file=self.key, cert_file=self.cert)\n\ndef _getResponse(opener, url, data=None, method=\"GET\"):\n request = urllib2.Request(url)\n if data:\n request.add_data(data)\n if method != \"GET\":\n request.get_method = lambda : method\n response = opener.open(request)\n debug(\"Code: %s\\n\" % response.code)\n debug(\"Headers: %s\\n\" % response.headers)\n debug(\"Msg: %s\\n\" % response.msg)\n debug(\"Url: %s\\n\" % response.url)\n return response\n\ndef getSSOCookie(opener, target_url, cookie):\n opener.addheaders = [('User-agent', 'curl-sso-certificate/0.0.2')] #in sync with cern-get-sso-cookie tool\n # For some reason before one needed to have a parent url. Now this does not seem to be the case anymore... \n #parentUrl = \"/\".join(target_url.split(\"/\", 4)[0:5]) + \"/\"\n parentUrl = target_url\n print parentUrl\n url = urllib2.unquote(_getResponse(opener, parentUrl).url)\n content = _getResponse(opener, url).read()\n ret = re.search('<form .+? 
action=\"(.+?)\">', content)\n if ret == None:\n raise Exception(\"error: The page doesn't have the form with adfs url, check 'User-agent' header\")\n url = urllib2.unquote(ret.group(1))\n h = HTMLParser.HTMLParser()\n post_data_local = []\n for match in re.finditer('input type=\"hidden\" name=\"([^\"]*)\" value=\"([^\"]*)\"', content):\n post_data_local += [(match.group(1), h.unescape(match.group(2)))]\n \n if not post_data_local:\n raise Exception(\"error: The page doesn't have the form with security attributes, check 'User-agent' header\")\n _getResponse(opener, url, urllib.urlencode(post_data_local)).read()\n\ndef getContent(target_url, post_data=None, method=\"GET\"):\n cert_path = expanduser(DEFAULT_CERT_PATH)\n key_path = expanduser(DEFAULT_KEY_PATH)\n cookie = cookielib.CookieJar()\n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie), HTTPSClientAuthHandler())\n debug(\"The return page is sso login page, will request cookie.\")\n hasCookie = False\n # if the access gave an exception, try to get a cookie\n try:\n getSSOCookie(opener, target_url, cookie)\n hasCookie = True \n result = _getResponse(opener, target_url, post_data, method).read()\n finally:\n if hasCookie:\n try:\n _getResponse(opener, \"https://login.cern.ch/adfs/ls/?wa=wsignout1.0\").read()\n except:\n error(\"Error, could not logout correctly from server\") \n return result\n\nif __name__ == \"__main__\":\n parser = OptionParser(usage=\"%prog [-d(ebug)] -o(ut) COOKIE_FILENAME -c(cert) CERN-PEM -k(ey) CERT-KEY -u(rl) URL\") \n parser.add_option(\"-d\", \"--debug\", dest=\"debug\", help=\"Enable pycurl debugging. 
Prints to data and headers to stderr.\", action=\"store_true\", default=False)\n parser.add_option(\"-p\", \"--postdata\", dest=\"postdata\", help=\"Data to be sent as post request\", action=\"store\", default=None)\n parser.add_option(\"-m\", \"--method\", dest=\"method\", help=\"Method to be used for the request\", action=\"store\", default=\"GET\")\n parser.add_option(\"-c\", \"--cert\", dest=\"cert_path\", help=\"Absolute path to cert file.\", action=\"store\", default=DEFAULT_CERT_PATH)\n parser.add_option(\"-k\", \"--key\", dest=\"key_path\", help=\"Absolute path to key file.\", action=\"store\", default=DEFAULT_KEY_PATH)\n (opts, args) = parser.parse_args()\n if not len(args) == 1:\n parser.error(\"Please specify a URL\")\n url = args[0]\n if opts.debug:\n logging.getLogger().setLevel(DEBUG)\n if opts.postdata == \"-\":\n opts.postdata = sys.stdin.read()\n try:\n setDefaultCertificate(opts.cert_path, opts.key_path)\n content = getContent(url, opts.postdata, opts.method)\n except urllib2.HTTPError, e:\n print e\n content = \"\"\n print content\n"
},
{
"alpha_fraction": 0.6533961892127991,
"alphanum_fraction": 0.6566102504730225,
"avg_line_length": 46.7198371887207,
"blob_id": "d9038829852230a54d1f7110ff494cd4af76326f",
"content_id": "27eef4593fb1fae785da17f2cd5f316b921dfbfa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 23335,
"license_type": "no_license",
"max_line_length": 285,
"num_lines": 489,
"path": "/autoIB.py",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# This script allows you to execute various misc test to automate IB building\n# steps, in particular:\n#\n# - Reset the weekly repository.\n# - Build and upload externals in the weekly repository.\n# - Build and upload ibs in the weekly repository.\n#\nfrom optparse import OptionParser\nimport buildRequestAPI as api\nimport sys, os, socket\nfrom urllib2 import urlopen\nfrom urllib import urlencode\nimport xml.parsers.expat\nfrom commands import getstatusoutput\nfrom getpass import getuser\nfrom time import strftime\nfrom os.path import abspath, join, dirname, exists, expanduser\nimport re\nfrom Lock import Lock\nfrom datetime import datetime, timedelta\nimport ws_sso_content_reader\nscriptPath = os.path.dirname( os.path.abspath(sys.argv[0]) )\nif scriptPath not in sys.path:\n sys.path.append(scriptPath)\n\nfrom all_json import loads, dumps\n\nDEFAULT_API_URL = \"https://cmsgit.web.cern.ch/cmsgit/buildrequests\"\n\ndef setTCUrl(url):\n global DEFAULT_API_URL\n DEFAULT_API_URL = url\n\ndef call(obj, method, **kwds):\n obj = str(obj).strip(\"/\")\n print obj,\":\", method\n print kwds\n if method == \"GET\":\n opts = urlencode(kwds)\n result = ws_sso_content_reader.getContent(join(DEFAULT_API_URL, obj) + \"?\" + opts, None, method)\n elif method in [\"POST\", \"PATCH\", \"DELETE\"]:\n opts = dumps(kwds)\n result = ws_sso_content_reader.getContent(join(DEFAULT_API_URL, obj), opts, method)\n print result\n return loads(result)\n\ntry:\n from hashlib import sha1 as sha\n def hash(s):\n return sha(s).hexdigest()\nexcept ImportError:\n import sha\n def hash(s):\n return sha.new(s).hexdigest()\n\ndef overloaded(maxLoad):\n err,out = getstatusoutput(\"uptime | sed -e 's/^.* //'\")\n if err:\n return False \n return float(out) > float(maxLoad)\n\n# Replace @TW with the week number, modulo 2\n# Replace @NW with the week number, modulo 2\n# Replace @PW with the week number, modulo 2\ndef expandDates(s):\n today = datetime.today()\n 
tw=str(int(today.strftime(\"%W\")) % 2)\n nw=str(int((today + timedelta(days=7)).strftime(\"%W\")) % 2)\n pw=str(int((today + timedelta(days=-7)).strftime(\"%W\")) % 2)\n return strftime(s.replace(\"@TW\", tw).replace(\"@NW\", nw).replace(\"@PW\", pw))\n\ndef expandRelease(s, release):\n # The queue is always CMSSW_x_y_X\n queue = re.sub(\"(CMSSW_[0-9]+_[0-9]+).*\", \"\\\\1_X\", release)\n s = s.replace(\"@RELEASE\", release)\n s = s.replace(\"@QUEUE\", queue)\n return s\n\n# Sanitized caracters which could possibly allow execution of unwanted\n# commands.\ndef sanitize(s):\n if not s:\n return \"\"\n return re.sub(\"[.]/\", \".\", re.sub(\"[^0-9a-zA-Z_,:./-]\", \"\", s))\n \ndef format(s, **kwds):\n return s % kwds\n\ndef die(s):\n print s\n sys.exit(1)\n\nEXTERNAL_INFO_URL=\"https://raw.github.com/cms-sw/cmsdist/IB/%s/stable/config.map\"\n# Get external information from github.\n# See http://cms-sw.github.io/cmsdist/ \n# for the format of the config.map file.\ndef getExternalsTags(release_queue, architecture):\n # Get the mapping between architecture and release\n url = EXTERNAL_INFO_URL % release_queue\n try:\n data = urlopen(url).read()\n except:\n die(\"Unable to find CMSDIST information for release queue %s.\" % release_queue)\n lines = [x.strip().split(\";\") for x in data.split(\"\\n\") if x.strip()]\n archInfo = {}\n for line in lines:\n parts = dict(x.split(\"=\") for x in line)\n if not \"SCRAM_ARCH\" in parts:\n die(\"Bad file format for config.map\")\n if parts[\"SCRAM_ARCH\"] == architecture:\n archInfo = dict(parts)\n break\n if not archInfo.get(\"CMSDIST_TAG\", None) or not archInfo.get(\"PKGTOOLS_TAG\", None):\n die(format(\"Could not find architecture %(architecture)s for release series %(release_queue)s.\\n\"\n \"Please update `config.map' file in the CMSDIST branch IB/%(release_queue)s/stable\",\n release_queue=release_queue,\n architecture=architecture))\n return {\"PKGTOOLS\": archInfo[\"PKGTOOLS_TAG\"],\n \"CMSDIST\": 
archInfo[\"CMSDIST_TAG\"]}\n\ndef process():\n # Get the first task from the list\n # Check if we know what to do\n # Mark it as started\n # Start doing it\n parser = OptionParser(usage=\"%prog process [options]\")\n parser.add_option(\"--match-arch\", metavar=\"REGEX\", dest=\"matchArch\", help=\"Limit architectures to those matching REGEX\", default=\".*\")\n parser.add_option(\"--match-release\", metavar=\"REGEX\", dest=\"matchRelease\", help=\"Limit releases to those matching REGEX\", default=\".*\")\n parser.add_option(\"--work-dir\", \"--top-dir\", metavar=\"PATH\", dest=\"workdir\", help=\"Work dir where processing happens\", default=None)\n parser.add_option(\"--jobs\", \"-j\", type=\"int\", metavar=\"N\", dest=\"jobs\", help=\"Number of parallel building threads\", default=1)\n parser.add_option(\"--builders\", type=\"int\", metavar=\"N\", dest=\"builders\", help=\"Number of packages built in parallel\", default=1)\n parser.add_option(\"--debug\", metavar=\"PATH\", dest=\"debug\", help=\"Print out what's happening\", action=\"store_true\", default=False)\n parser.add_option(\"--dry-run\", \"-n\", metavar=\"BOOL\", dest=\"dryRun\", help=\"Do not execute\", action=\"store_true\", default=False)\n parser.add_option(\"--api-url\", metavar=\"URL\", dest=\"apiUrl\", help=\"Specify API endpoint URL\", default=DEFAULT_API_URL)\n parser.add_option(\"--max-load\", type=\"int\", metavar=\"LOAD\", dest=\"maxLoad\", help=\"Do not execute if average last 15 minutes load > LOAD\", default=8)\n opts, args = parser.parse_args()\n setTCUrl(opts.apiUrl)\n if not opts.workdir:\n print \"Please specify a workdir\"\n sys.exit(1)\n\n if exists(\"/etc/iss.nologin\"):\n print \"/etc/iss.nologin found. 
Not doing anything and waiting for machine out of maintainance mode.\"\n sys.exit(1)\n opts.workdir = abspath(opts.workdir)\n thisPath=dirname(__file__)\n getstatusoutput(format(\n \"%(here)s/syncLogs.py %(workdir)s\",\n here=thisPath, \n workdir=opts.workdir))\n lockPath = join(opts.workdir, \"cms\", \".cmsLock\")\n lock = Lock(lockPath, True, 60*60*12)\n if not lock:\n if opts.debug:\n print \"Lock found in %s\" % lockPath\n sys.exit(1)\n lock.__del__()\n \n if overloaded(opts.maxLoad):\n print \"Current load exceeds maximum allowed of %s.\" % opts.maxLoad\n sys.exit(1)\n tasks = call(\"/\", \"GET\", \n release_match=opts.matchRelease,\n architecture_match=opts.matchArch,\n state=\"Pending\")\n print tasks\n if not len(tasks):\n if opts.debug:\n print \"Nothing to be done which matches release %s and architecture %s\" % (opts.matchArch, opts.matchRelease)\n sys.exit(1)\n # Look up for a hostname-filter option in the payload and if it is there,\n # make sure we match it.\n runnableTask = None\n for task in tasks:\n if not \"payload\" in task:\n continue\n if re.match(task[\"payload\"].get(\"hostnameFilter\", \".*\"), socket.gethostname()):\n runnableTask = task\n break\n if not runnableTask:\n print \"Nothing to be done on this machine.\"\n sys.exit(1)\n # Default payload options.\n payload = {\"debug\": False}\n payload.update(runnableTask[\"payload\"])\n\n # We can now specify tags in the format repository:tag to pick up branches\n # from different people.\n payload[\"pkgtools_remote\"] = \"cms-sw\"\n payload[\"cmsdist_remote\"] = \"cms-sw\"\n if \":\" in payload[\"PKGTOOLS\"]:\n payload[\"pkgtools_remote\"], payload[\"PKGTOOLS\"] = payload[\"PKGTOOLS\"].split(\":\", 1)\n if \":\" in payload[\"CMSDIST\"]:\n payload[\"cmsdist_remote\"], payload[\"CMSDIST\"] = payload[\"CMSDIST\"].split(\":\", 1)\n \n if opts.dryRun:\n print \"Dry run. 
Not building\"\n sys.exit(1)\n\n ok = call(runnableTask[\"id\"], \"PATCH\", \n url=\"http://cmssdt.cern.ch/SDT/tc-ib-logs/%s/log.%s.html\" % (socket.gethostname(), runnableTask[\"id\"]),\n machine=socket.gethostname(),\n pid=os.getpid(),\n state=\"Running\")\n if not ok:\n print \"Could not change request %s state to building\" % runnableTask[\"id\"] \n sys.exit(1)\n \n # Build the package.\n # We gracefully handle any exception (broken pipe, ctrl-c, SIGKILL)\n # by failing the request if they happen. We also always cat \n # the log for this build in a global log file.\n log = \"\"\n getstatusoutput(format(\n \"echo 'Log not sync-ed yet' > %(workdir)s/log.%(task_id)s;\\n\"\n \"%(here)s/syncLogs.py %(workdir)s\",\n task_id=runnableTask[\"id\"],\n here=thisPath, \n workdir=opts.workdir))\n try:\n print \"Building...\"\n error, log = getstatusoutput(format(\"set -e ;\\n\"\n \"mkdir -p %(workdir)s/%(task_id)s ;\\n\"\n \"export CMS_PATH=%(workdir)s/cms ;\\n\"\n \"cd %(workdir)s ;\\n\"\n \"( echo 'Building %(package)s using %(cmsdistRemote)s:%(cmsdistTag)s';\\n\"\n \" rm -rf %(task_id)s;\\n\"\n \" git clone git://github.com/%(cmsdistRemote)s/cmsdist.git %(task_id)s/CMSDIST || git clone https://:@git.cern.ch/kerberos/CMSDIST.git %(task_id)s/CMSDIST;\\n\"\n \" pushd %(task_id)s/CMSDIST; git checkout %(cmsdistTag)s; popd;\\n\"\n \" PKGTOOLS_TAG=\\\"`echo %(pkgtoolsTag)s | sed -e's/\\\\(V[0-9]*-[0-9]*\\\\).*/\\\\1-XX/'`\\\";\\n\"\n \" git clone git://github.com/%(pkgtoolsRemote)s/pkgtools.git %(task_id)s/PKGTOOLS || git clone https://:@git.cern.ch/kerberos/PKGTOOLS.git %(task_id)s/PKGTOOLS;\\n\"\n \" pushd %(task_id)s/PKGTOOLS; git checkout $PKGTOOLS_TAG; popd;\\n\"\n \" echo \\\"### RPM cms dummy `date +%%s`\\n%%prep\\n%%build\\n%%install\\n\\\" > %(task_id)s/CMSDIST/dummy.spec ;\\n\"\n \" set -x ;\\n\"\n \" rm -rf %(workdir)s/cms %(workdir)s/b ;\\n\"\n \" perl -p -i -e 's/### RPM cms cmssw.*/### RPM cms cmssw %(base_release_name)s/' %(task_id)s/CMSDIST/cmssw.spec ;\\n\"\n 
\" perl -p -i -e 's/### RPM cms cmssw-ib .*/### RPM cms cmssw-ib %(base_release_name)s/' %(task_id)s/CMSDIST/cmssw-ib.spec ;\\n\"\n \" perl -p -i -e 's/### RPM cms cmssw-qa .*/### RPM cms cmssw-qa %(base_release_name)s/' %(task_id)s/CMSDIST/cmssw-qa.spec ;\\n\"\n \" perl -p -i -e 's/### RPM cms cmssw-validation .*/### RPM cms cmssw-validation %(base_release_name)s/' %(task_id)s/CMSDIST/cmssw-validation.spec ;\\n\"\n \" perl -p -i -e 's/### RPM cms cmssw-patch.*/### RPM cms cmssw-patch %(real_release_name)s/' %(task_id)s/CMSDIST/cmssw-patch.spec ;\\n\"\n \" %(workdir)s/%(task_id)s/PKGTOOLS/cmsBuild %(debug)s --new-scheduler --cmsdist %(workdir)s/%(task_id)s/CMSDIST %(ignoreErrors)s --builders %(builders)s -j %(jobs)s --repository %(repository)s --architecture %(architecture)s --work-dir %(workdir)s/cms build %(package)s ;\\n\"\n \" %(workdir)s/%(task_id)s/PKGTOOLS/cmsBuild %(debug)s --new-scheduler --cmsdist %(workdir)s/%(task_id)s/CMSDIST --repository %(repository)s --upload-tmp-repository %(tmpRepository)s %(syncBack)s --architecture %(architecture)s --work-dir %(workdir)s/cms upload %(package)s ;\\n\"\n \" PKG_BUILD=`find %(workdir)s/cms/RPMS/%(architecture)s -name \\\"*%(package)s*\\\"| sed -e's|.*/||g;s|-1-1.*||g'`;\\n\"\n \" set +x ;\\n\"\n \" echo Build completed. 
you can now install the package built by doing: ;\\n\"\n \" echo \\\"wget http://cmsrep.cern.ch/cmssw/cms/bootstrap.sh\\\" ;\\n\"\n \" echo \\\"sh -x ./bootstrap.sh setup -path w -arch %(architecture)s -r %(repository)s >& bootstrap_%(architecture)s.log \\\";\\n\"\n \" echo \\\"(source w/%(architecture)s/external/apt/*/etc/profile.d/init.sh ; apt-get install $PKG_BUILD )\\\" ;\\n\"\n \" echo AUTOIB SUCCESS) 2>&1 | tee %(workdir)s/log.%(task_id)s\",\n workdir=opts.workdir,\n debug=payload[\"debug\"] == True and \"--debug\" or \"\",\n cmsdistTag=sanitize(payload[\"CMSDIST\"]),\n pkgtoolsTag=sanitize(payload[\"PKGTOOLS\"]),\n cmsdistRemote=sanitize(payload[\"cmsdist_remote\"]),\n pkgtoolsRemote=sanitize(payload[\"pkgtools_remote\"]),\n architecture=sanitize(runnableTask[\"architecture\"]),\n release_name=sanitize(re.sub(\"_[A-Z]+_X\", \"_X\", runnableTask[\"release\"])),\n base_release_name=re.sub(\"_[^_]*patch[0-9]*$\", \"\", sanitize(payload[\"release\"])),\n real_release_name=sanitize(payload[\"release\"]),\n package=sanitize(payload[\"package\"]),\n repository=sanitize(payload[\"repository\"]),\n syncBack=payload[\"syncBack\"] == True and \"--sync-back\" or \"\",\n ignoreErrors=payload[\"ignoreErrors\"] == True and \"-k\" or \"\",\n tmpRepository=sanitize(payload[\"tmpRepository\"]),\n task_id=runnableTask[\"id\"],\n jobs=opts.jobs,\n builders=opts.builders))\n getstatusoutput(format(\"echo 'Task %(task_id)s completed successfully.' 
>> %(workdir)s/log.%(task_id)s\",\n workdir=opts.workdir,\n task_id=runnableTask[\"id\"]))\n except Exception, e:\n log = open(format(\"%(workdir)s/log.%(task_id)s\", workdir=opts.workdir, task_id=runnableTask[\"id\"])).read()\n log += \"\\nInterrupted externally.\"\n log += str(e)\n getstatusoutput(format(\"echo 'Interrupted externally' >> %(workdir)s/log.%(task_id)s\",\n workdir=opts.workdir,\n task_id=runnableTask[\"id\"]))\n \n error, saveLog = getstatusoutput(format(\"set -e ;\\n\"\n \"echo '#### Log file for %(task_id)s' >> %(workdir)s/log ;\\n\"\n \"cat %(workdir)s/log.%(task_id)s >> %(workdir)s/log\",\n workdir=opts.workdir,\n task_id=runnableTask[\"id\"]))\n \n getstatusoutput(\"%s/syncLogs.py %s\" % (thisPath, opts.workdir))\n if not \"AUTOIB SUCCESS\" in log:\n call(runnableTask[\"id\"], \"PATCH\", \n state=\"Failed\", \n url=\"http://cmssdt.cern.ch/SDT/tc-ib-logs/%s/log.%s.html\" % (socket.gethostname(), runnableTask[\"id\"] ))\n print log\n print saveLog\n sys.exit(1)\n \n call(runnableTask[\"id\"], \"PATCH\", \n state=\"Completed\", \n url=\"http://cmssdt.cern.ch/SDT/tc-ib-logs/%s/log.%s.html\" % (socket.gethostname(), runnableTask[\"id\"]))\n\n # Here we are done processing the job. 
Now schedule continuations.\n if not \"continuations\" in payload:\n sys.exit(0)\n continuationsSpec = payload[\"continuations\"] or \"\"\n continuations = [x for x in continuationsSpec.split(\";\")]\n if len(continuations) == 0:\n sys.exit(0)\n \n if len(continuations) != 1:\n print \"WARNING: multiple continuations not supported yet\"\n \n if opts.debug:\n print continuations\n nextTasks = [p.split(\":\", 1) for p in continuations[0].split(\",\") if \":\" in p]\n \n for package, architecture in nextTasks:\n options = {}\n # Notice that continuations will not support overriding CMSDIST and\n # PKGTOOLS completely.\n # \n # We do not want that because there could be cases where\n # the first step is done for one architecture, while the second \n # step is done for another.\n options[\"PKGTOOLS\"] = sanitize(payload[\"PKGTOOLS\"])\n options[\"CMSDIST\"] = sanitize(payload[\"CMSDIST\"])\n # For the moment do not support continuations of continuations.\n options[\"continuations\"] = \"\"\n options.update(getExternalsTags(expandRelease(\"@QUEUE\", payload[\"release\"]), architecture))\n call(\"\", \"POST\",\n release=sanitize(payload[\"release\"]),\n architecture=sanitize(architecture),\n repository=sanitize(payload[\"repository\"]),\n tmpRepository=sanitize(payload[\"tmpRepository\"]),\n syncBack=payload[\"syncBack\"],\n debug=payload[\"debug\"],\n ignoreErrors=payload[\"ignoreErrors\"],\n package=sanitize(package),\n PKGTOOLS=options[\"PKGTOOLS\"],\n CMSDIST=options[\"CMSDIST\"],\n continuations=options[\"continuations\"]\n )\n\ndef listTasks():\n # Get the first task from the list\n # Check if we know what to do\n # Mark it as started\n # Start doing it\n parser = OptionParser(usage=\"%prog list [options]\")\n parser.add_option(\"--match-arch\", metavar=\"REGEX\", dest=\"matchArch\", help=\"Limit architectures to those matching REGEX\", default=\".*\")\n parser.add_option(\"--match-release\", metavar=\"REGEX\", dest=\"matchRelease\", help=\"Limit releases to those 
matching REGEX\", default=\".*\")\n parser.add_option(\"--state\", metavar=\"Running,Pending,Completed,Failed\", dest=\"state\", help=\"Show requests in the given state\", default=\"Running\")\n parser.add_option(\"--format\", metavar=\"FORMAT\", dest=\"format\", help=\"Output format\", default=\"%i: %p %r %a\")\n parser.add_option(\"--api-url\", metavar=\"URL\", dest=\"apiUrl\", help=\"Specify API endpoint\", default=DEFAULT_API_URL)\n opts, args = parser.parse_args()\n setTCUrl(opts.apiUrl)\n results = call(\"/\", \"GET\", \n release_match=opts.matchRelease,\n architecture_match=opts.matchArch,\n state=opts.state)\n if not results:\n sys.exit(1)\n replacements = [(\"i\", \"id\"),\n (\"p\", \"package\"),\n (\"a\", \"architecture\"),\n (\"r\", \"release\"),\n (\"s\", \"state\")]\n opts.format = opts.format.replace(\"%\", \"%%\")\n for x, y in replacements:\n opts.format = opts.format.replace(\"%%\" + x, \"%(\" + y + \")s\")\n results = [x.update(x[\"payload\"]) or x for x in results]\n print \"\\n\".join([opts.format % x for x in results])\n\n\n# This will request to build a package in the repository.\n# - Setup a few parameters for the request\n# - Get PKGTOOLS and CMSDIST from TC if they are not passed\n# - Create the request.\ndef requestBuildPackage():\n parser = OptionParser()\n parser.add_option(\"--release\", \"-r\", metavar=\"RELEASE\", dest=\"release\", help=\"Specify release.\", default=None)\n parser.add_option(\"--architecture\", \"-a\", metavar=\"ARCHITECTURE\", dest=\"architecture\", help=\"Specify architecture\", default=None)\n parser.add_option(\"--repository\", \"-d\", metavar=\"REPOSITORY NAME\", dest=\"repository\", help=\"Specify repository to use for bootstrap\", default=\"cms\")\n parser.add_option(\"--upload-tmp-repository\", metavar=\"REPOSITORY SUFFIX\", dest=\"tmpRepository\", help=\"Specify repository suffix to use for upload\", default=getuser())\n parser.add_option(\"--pkgtools\", metavar=\"TAG\", dest=\"pkgtools\", help=\"Specify 
PKGTOOLS version to use. You can specify <user>:<tag> to try out a non official tag.\", default=None)\n parser.add_option(\"--cmsdist\", metavar=\"TAG\", dest=\"cmsdist\", help=\"Specify CMSDIST tag branch to use. You can specify <user>:<tag> to try out a non official tag.\", default=None)\n parser.add_option(\"--hostname-filter\", metavar=\"HOSTNAME-REGEX\", dest=\"hostnameFilter\", help=\"Specify a given regular expression which must be matched by the hostname of the builder machine.\", default=\".*\")\n parser.add_option(\"--sync-back\", metavar=\"BOOL\", dest=\"syncBack\", action=\"store_true\", help=\"Specify whether or not to sync back the repository after upload\", default=False)\n parser.add_option(\"--ignore-compilation-errors\", \"-k\", metavar=\"BOOL\", dest=\"ignoreErrors\", help=\"When supported by the spec, ignores compilation errors and still packages the available build products\", action=\"store_true\", default=False)\n parser.add_option(\"--api-url\", metavar=\"url\", dest=\"apiUrl\", help=\"Specify the url for the API\", default=DEFAULT_API_URL)\n parser.add_option(\"--continuations\", metavar=\"SPEC\", dest=\"continuations\", help=\"Specify a comma separated list of task:architecture which need to be scheduled after if this task succeeds\", default=\"\")\n parser.add_option(\"--debug\", metavar=\"BOOL\", dest=\"debug\", help=\"Add cmsbuild debug information\", action=\"store_true\", default=False)\n parser.add_option(\"--dry-run\", \"-n\", metavar=\"BOOL\", dest=\"dryRun\", help=\"Do not push the request to tag collector\", action=\"store_true\", default=False)\n opts, args = parser.parse_args()\n if len(args) != 2:\n parser.error(\"You need to specify a package\")\n setTCUrl(opts.apiUrl)\n\n if not opts.repository:\n parser.error(\"Please specify a repository\")\n if not opts.release:\n parser.error(\"Please specify a release\")\n if not opts.architecture:\n parser.error(\"Please specify an architecture\")\n\n options = {}\n 
options[\"hostnameFilter\"] = opts.hostnameFilter\n options[\"release\"] = expandDates(opts.release)\n options[\"release_queue\"] = expandRelease(\"@QUEUE\", options[\"release\"])\n options[\"architecture\"] = opts.architecture\n options[\"repository\"] = expandRelease(expandDates(opts.repository).replace(\"@ARCH\", options[\"architecture\"]), options[\"release\"])\n options[\"tmpRepository\"] = expandDates(opts.tmpRepository)\n options[\"syncBack\"] = opts.syncBack\n options[\"package\"] = expandDates(args[1])\n options[\"continuations\"] = opts.continuations.replace(\"@ARCH\", options[\"architecture\"])\n\n options[\"ignoreErrors\"] = opts.ignoreErrors\n options[\"debug\"] = opts.debug\n\n if opts.cmsdist and opts.continuations:\n print format(\"WARNING: you have specified --pkgtools to overwrite the PKGTOOLS tag coming from tag collector.\\n\"\n \"However, this will happen only for %(package)s, continuations will still fetch those from the tagcolletor.\", package=options[\"package\"])\n\n if opts.cmsdist and opts.continuations:\n print format(\"WARNING: you have specified --cmsdist to overwrite the PKGTOOLS tag coming from tag collector.\\n\"\n \"However, this will happen only for %(package)s, continuations will still fetch those from the tagcolletor.\", package=options[\"package\"])\n\n # Get the mapping between architecture and release\n options.update(getExternalsTags(options[\"release_queue\"], options[\"architecture\"]))\n \n if opts.pkgtools:\n options[\"PKGTOOLS\"] = sanitize(expandRelease(opts.pkgtools, options[\"release\"]).replace(\"@ARCH\", options[\"architecture\"]))\n if opts.cmsdist:\n options[\"CMSDIST\"] = sanitize(expandRelease(opts.cmsdist, options[\"release\"]).replace(\"@ARCH\", options[\"architecture\"]))\n if not options.get(\"CMSDIST\"):\n print \"Unable to find CMSDIST for releases %s on %s\" % (options[\"release\"], options[\"architecture\"])\n sys.exit(1)\n if not options.get(\"PKGTOOLS\"):\n print \"Unable to find PKGTOOLS for releases 
%s on %s\" % (options[\"release\"], options[\"architecture\"])\n sys.exit(1)\n if opts.dryRun:\n print \"Dry run specified, the request would look like:\\n %s\" % str(options)\n sys.exit(1)\n call(\"\", \"POST\", **options)\n\ndef cancel():\n parser = OptionParser(usage=\"%prog cancel <request-id>\")\n parser.add_option(\"--api-url\", metavar=\"url\", dest=\"apiUrl\", help=\"Specify the url for the API\", default=DEFAULT_API_URL)\n opts, args = parser.parse_args()\n setTCUrl(opts.apiUrl)\n if not len(args):\n print \"Please specify a request id.\"\n ok = call(args[1], \"DELETE\")\n if not ok:\n print \"Error while cancelling request %s\" % args[1]\n sys.exit(1)\n\ndef reschedule():\n parser = OptionParser(usage=\"%prog reschedule <request-id>\")\n parser.add_option(\"--api-url\", metavar=\"url\", dest=\"apiUrl\", help=\"Specify the url for the API\", default=DEFAULT_API_URL)\n opts, args = parser.parse_args()\n setTCUrl(opts.apiUrl)\n if not len(args):\n print \"Please specify a request id.\"\n ok = call(args[1], \"PATCH\",\n pid=\"\",\n machine=\"\",\n url=\"\",\n state=\"Pending\")\n if not ok:\n print \"Error while rescheduling request %s\" % args[1]\n sys.exit(1)\n\n\nCOMMANDS = {\"process\": process, \n \"cancel\": cancel,\n \"list\": listTasks,\n \"request\": requestBuildPackage,\n \"reschedule\": reschedule\n }\n\nif __name__ == \"__main__\":\n os.environ[\"LANG\"] = \"C\"\n commands = [x for x in sys.argv[1:] if not x.startswith(\"-\")]\n if len(commands) == 0 or not commands[0] in COMMANDS.keys():\n print \"Usage: autoIB.py <command> [options]\\n\"\n print \"Where <command> can be among the following:\\n\"\n print \"\\n\".join(COMMANDS.keys())\n print \"\\nUse `autoIB.py <command> --help' to get more detailed help.\"\n sys.exit(1)\n command = commands[0]\n COMMANDS[command]()\n"
},
{
"alpha_fraction": 0.665544331073761,
"alphanum_fraction": 0.7014590501785278,
"avg_line_length": 37.739131927490234,
"blob_id": "c60757d4edff3f5372c5f83d3a2658a01f7eb1d3",
"content_id": "dd84db57adb976764099fc208d717f1f0416bd7e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 891,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 23,
"path": "/test_topic_collector.sh",
"repo_name": "cms-sw/ib-scheduler",
"src_encoding": "UTF-8",
"text": "#!/bin/sh -ex\ncat << \\EOF | ./ws_sso_content_reader.py -p- https://eulisse.web.cern.ch/eulisse/cgi-bin/git-collector/buildrequests/3 -m PATCH\n{\n \"pid\": \"100\"\n}\nEOF\n./ws_sso_content_reader.py https://eulisse.web.cern.ch/eulisse/cgi-bin/git-collector/buildrequests/1 -m DELETE\ncat << \\EOF | ./ws_sso_content_reader.py -p- https://eulisse.web.cern.ch/eulisse/cgi-bin/git-collector/buildrequests\n{\n \"architecture\": \"slc5_amd64_gcc472\",\n \"release_name\": \"CMSSW_6_2_X_2013-04-08-0200\",\n \"repository\": \"cms\",\n \"PKGTOOLS\": \"ktf:my-branch\",\n \"CMSDIST\": \"ktf:another-branch\",\n \"ignoreErrors\": true,\n \"package\": \"cmssw-ib\",\n \"continuations\": \"cmssw-qa:slc5_amd64_gcc472\",\n \"syncBack\": false,\n \"debug\": false \n}\nEOF\n./ws_sso_content_reader.py https://cern.ch/eulisse/cgi-bin/git-collector/buildrequests\n./ws_sso_content_reader.py https://cern.ch/eulisse/cgi-bin/git-collector/buildrequests/3\n"
}
] | 11 |
ShokuApp/professional-forward | https://github.com/ShokuApp/professional-forward | 27b3bafae8f357b37bcebd715c43e073dcedc761 | c892be90179a2886bbe397bc3bd6a11361e149aa | 497821c739c5748f3e076aa560b74207cec201e9 | refs/heads/main | 2023-02-28T07:33:46.468298 | 2020-12-11T09:15:17 | 2020-12-11T09:15:17 | 317,159,232 | 0 | 0 | null | 2020-11-30T08:34:55 | 2021-01-07T16:17:02 | 2021-02-05T12:30:24 | TypeScript | [
{
"alpha_fraction": 0.663385808467865,
"alphanum_fraction": 0.6640419960021973,
"avg_line_length": 23.983606338500977,
"blob_id": "54a624c524e2ab288df6a7e7ba24f3c45caf57b0",
"content_id": "64568a972ea876ecb14fd417f7eb1a0549816f0d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 1524,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 61,
"path": "/src/repositories/profile.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Repository } from \"./repository\";\nimport { Profile } from \"../models\";\n\nimport profiles from \"../../data/profiles/data.json\";\nimport { RestaurantRepository } from \"./restaurant\";\n\nconst restaurantRepository = new RestaurantRepository();\n\n// deepcode ignore no-any: JSON\nasync function fromJSON(profileJson: any): Promise<Profile> {\n const restaurant = await restaurantRepository.get(profileJson.restaurant);\n\n return {\n id: profileJson.id,\n email: profileJson.email,\n firstName: profileJson.firstName,\n lastName: profileJson.lastName,\n restaurant,\n };\n}\n\nfunction toJSON(profile: Profile) {\n return {\n id: profile.id,\n email: profile.email,\n firstName: profile.firstName,\n lastName: profile.lastName,\n restaurant: profile.restaurant.id,\n };\n}\n\nexport class ProfileRepository implements Repository<Profile> {\n async get(id: string): Promise<Profile> {\n const profileJson = profiles.find((item) => item.id === id);\n\n if (profileJson === undefined) {\n throw Error(\"Profile not found\");\n }\n\n return fromJSON(profileJson);\n }\n\n async set(profile: Profile): Promise<void> {\n const index = profiles.findIndex((item) => item.id === profile.id);\n const profileJson = toJSON(profile);\n\n if (index !== -1) {\n profiles[index] = profileJson;\n } else {\n profiles.push(profileJson);\n }\n }\n\n async list(): Promise<Profile[]> {\n return Promise.all(\n profiles.map((profileJson) => {\n return fromJSON(profileJson);\n })\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6481802463531494,
"alphanum_fraction": 0.6481802463531494,
"avg_line_length": 15.485713958740234,
"blob_id": "a1ad110f6bf9157206fb5a1e831f7ff271d16338",
"content_id": "719f76a605719967770426a28f65e9637c024b75",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 577,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 35,
"path": "/src/blocs/dish/event.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Dish } from \"../../models\";\n\nexport abstract class DishEvent {}\n\nexport class DishCreateEvent extends DishEvent {\n dish: Dish;\n\n constructor(dish: Dish) {\n super();\n this.dish = dish;\n }\n}\n\nexport class DishGetEvent extends DishEvent {\n id: string;\n\n constructor(id: string) {\n super();\n this.id = id;\n }\n}\n\nexport class DishSetEvent extends DishEvent {\n id: string;\n dish: Partial<Dish>;\n\n constructor(id: string, dish: Partial<Dish>) {\n super();\n\n this.id = id;\n this.dish = dish;\n }\n}\n\nexport class DishListEvent extends DishEvent {}\n"
},
{
"alpha_fraction": 0.6472602486610413,
"alphanum_fraction": 0.6472602486610413,
"avg_line_length": 21.711111068725586,
"blob_id": "7e34bfae967400389b8bb142be00a40f0c4cf071",
"content_id": "c4ed4a3a6eda75f070734a0119ddc29486210de2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 2044,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 90,
"path": "/src/blocs/sauce/bloc.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Bloc } from \"@felangel/bloc\";\nimport {\n SauceCreateEvent,\n SauceEvent,\n SauceGetEvent,\n SauceListEvent,\n SauceSetEvent,\n} from \"./event\";\nimport {\n SauceCreateState,\n SauceErrorState,\n SauceGetState,\n SauceInitialState,\n SauceListState,\n SauceLoadingState,\n SauceSetState,\n SauceState,\n} from \"./state\";\nimport { SauceRepository } from \"../../repositories\";\nimport { Sauce } from \"../../models\";\n\nexport class SauceBloc extends Bloc<SauceEvent, SauceState> {\n private repository: SauceRepository;\n\n constructor(repository: SauceRepository) {\n super(new SauceInitialState());\n\n this.repository = repository;\n }\n\n async *mapEventToState(event: SauceEvent): AsyncIterableIterator<SauceState> {\n yield new SauceLoadingState();\n\n if (event instanceof SauceCreateEvent) {\n yield* this.create(event);\n } else if (event instanceof SauceGetEvent) {\n yield* this.get(event);\n } else if (event instanceof SauceSetEvent) {\n yield* this.set(event);\n } else if (event instanceof SauceListEvent) {\n yield* this.list(event);\n }\n }\n\n async *create(event: SauceCreateEvent) {\n try {\n await this.repository.set(event.sauce);\n\n yield new SauceCreateState();\n } catch (e) {\n yield new SauceErrorState();\n }\n }\n\n async *get(event: SauceGetEvent) {\n try {\n const sauce = await this.repository.get(event.id);\n\n yield new SauceGetState(sauce);\n } catch (e) {\n yield new SauceErrorState();\n }\n }\n\n async *set(event: SauceSetEvent) {\n try {\n const originalSauce = await this.repository.get(event.id);\n const sauce: Sauce = {\n ...originalSauce,\n ...event.sauce,\n };\n\n await this.repository.set(sauce);\n\n yield new SauceSetState(sauce);\n } catch (e) {\n yield new SauceErrorState();\n }\n }\n\n async *list(event: SauceListEvent) {\n try {\n const sauces = await this.repository.list();\n\n yield new SauceListState(sauces);\n } catch (e) {\n yield new SauceErrorState();\n }\n }\n}\n"
},
{
"alpha_fraction": 0.638052225112915,
"alphanum_fraction": 0.638052225112915,
"avg_line_length": 21.133333206176758,
"blob_id": "10ab33bc7e7db020eb8ce63b4639bb733c70da5b",
"content_id": "2cc98ceb0804627031cd703e3eddd08299283ac5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 1992,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 90,
"path": "/src/blocs/menu/bloc.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Bloc } from \"@felangel/bloc\";\nimport {\n MenuCreateEvent,\n MenuEvent,\n MenuGetEvent,\n MenuListEvent,\n MenuSetEvent,\n} from \"./event\";\nimport {\n MenuCreateState,\n MenuErrorState,\n MenuGetState,\n MenuInitialState,\n MenuListState,\n MenuLoadingState,\n MenuSetState,\n MenuState,\n} from \"./state\";\nimport { MenuRepository } from \"../../repositories\";\nimport { Menu } from \"../../models\";\n\nexport class MenuBloc extends Bloc<MenuEvent, MenuState> {\n private repository: MenuRepository;\n\n constructor(repository: MenuRepository) {\n super(new MenuInitialState());\n\n this.repository = repository;\n }\n\n async *mapEventToState(event: MenuEvent): AsyncIterableIterator<MenuState> {\n yield new MenuLoadingState();\n\n if (event instanceof MenuCreateEvent) {\n yield* this.create(event);\n } else if (event instanceof MenuGetEvent) {\n yield* this.get(event);\n } else if (event instanceof MenuSetEvent) {\n yield* this.set(event);\n } else if (event instanceof MenuListEvent) {\n yield* this.list(event);\n }\n }\n\n async *create(event: MenuCreateEvent) {\n try {\n await this.repository.set(event.menu);\n\n yield new MenuCreateState();\n } catch (e) {\n yield new MenuErrorState();\n }\n }\n\n async *get(event: MenuGetEvent) {\n try {\n const menu = await this.repository.get(event.id);\n\n yield new MenuGetState(menu);\n } catch (e) {\n yield new MenuErrorState();\n }\n }\n\n async *set(event: MenuSetEvent) {\n try {\n const originalMenu = await this.repository.get(event.id);\n const menu: Menu = {\n ...originalMenu,\n ...event.menu,\n };\n\n await this.repository.set(menu);\n\n yield new MenuSetState(menu);\n } catch (e) {\n yield new MenuErrorState();\n }\n }\n\n async *list(event: MenuListEvent) {\n try {\n const menus = await this.repository.list();\n\n yield new MenuListState(menus);\n } catch (e) {\n yield new MenuErrorState();\n }\n }\n}\n"
},
{
"alpha_fraction": 0.8260869383811951,
"alphanum_fraction": 0.8260869383811951,
"avg_line_length": 22,
"blob_id": "d9e3f2b217f2e9b3dc3a1b578d863d5284b06063",
"content_id": "1c31eff65f2937a3e33c0e938d450f7aed8ce550",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 23,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 1,
"path": "/README.md",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "# professional-forward\n"
},
{
"alpha_fraction": 0.6804733872413635,
"alphanum_fraction": 0.6804733872413635,
"avg_line_length": 17.77777862548828,
"blob_id": "39a2bf7d58144cc54060bcd667199a4acb858f74",
"content_id": "a63e5a2369b3a1b779b5decc5655e4665db71285",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 169,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 9,
"path": "/src/models/profile.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Restaurant } from \"./restaurant\";\n\nexport type Profile = {\n id: string;\n email: string;\n firstName: string;\n lastName: string;\n restaurant: Restaurant;\n};\n"
},
{
"alpha_fraction": 0.638052225112915,
"alphanum_fraction": 0.638052225112915,
"avg_line_length": 21.133333206176758,
"blob_id": "3712d5f3861c16bf0bac5451b49c9262c03de365",
"content_id": "f39cab8eb27dafeae83c49af492729306339ec47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 1992,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 90,
"path": "/src/blocs/card/bloc.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Bloc } from \"@felangel/bloc\";\nimport {\n CardCreateEvent,\n CardEvent,\n CardGetEvent,\n CardListEvent,\n CardSetEvent,\n} from \"./event\";\nimport {\n CardCreateState,\n CardErrorState,\n CardGetState,\n CardInitialState,\n CardListState,\n CardLoadingState,\n CardSetState,\n CardState,\n} from \"./state\";\nimport { CardRepository } from \"../../repositories\";\nimport { Card } from \"../../models\";\n\nexport class CardBloc extends Bloc<CardEvent, CardState> {\n private repository: CardRepository;\n\n constructor(repository: CardRepository) {\n super(new CardInitialState());\n\n this.repository = repository;\n }\n\n async *mapEventToState(event: CardEvent): AsyncIterableIterator<CardState> {\n yield new CardLoadingState();\n\n if (event instanceof CardCreateEvent) {\n yield* this.create(event);\n } else if (event instanceof CardGetEvent) {\n yield* this.get(event);\n } else if (event instanceof CardSetEvent) {\n yield* this.set(event);\n } else if (event instanceof CardListEvent) {\n yield* this.list(event);\n }\n }\n\n async *create(event: CardCreateEvent) {\n try {\n await this.repository.set(event.card);\n\n yield new CardCreateState();\n } catch (e) {\n yield new CardErrorState();\n }\n }\n\n async *get(event: CardGetEvent) {\n try {\n const card = await this.repository.get(event.id);\n\n yield new CardGetState(card);\n } catch (e) {\n yield new CardErrorState();\n }\n }\n\n async *set(event: CardSetEvent) {\n try {\n const originalCard = await this.repository.get(event.id);\n const card: Card = {\n ...originalCard,\n ...event.card,\n };\n\n await this.repository.set(card);\n\n yield new CardSetState(card);\n } catch (e) {\n yield new CardErrorState();\n }\n }\n\n async *list(event: CardListEvent) {\n try {\n const cards = await this.repository.list();\n\n yield new CardListState(cards);\n } catch (e) {\n yield new CardErrorState();\n }\n }\n}\n"
},
{
"alpha_fraction": 0.6846885681152344,
"alphanum_fraction": 0.6846885681152344,
"avg_line_length": 24.130434036254883,
"blob_id": "8020242b2b18aa85c2ddbfd0e76e1bf8ff70db4a",
"content_id": "81a5741df2951dc9858143677cee2099532d7e97",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 2312,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 92,
"path": "/src/blocs/restaurant/bloc.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Bloc } from \"@felangel/bloc\";\nimport {\n RestaurantCreateEvent,\n RestaurantEvent,\n RestaurantGetEvent,\n RestaurantListEvent,\n RestaurantSetEvent,\n} from \"./event\";\nimport {\n RestaurantCreateState,\n RestaurantErrorState,\n RestaurantGetState,\n RestaurantInitialState,\n RestaurantListState,\n RestaurantLoadingState,\n RestaurantSetState,\n RestaurantState,\n} from \"./state\";\nimport { RestaurantRepository } from \"../../repositories\";\nimport { Restaurant } from \"../../models\";\n\nexport class RestaurantBloc extends Bloc<RestaurantEvent, RestaurantState> {\n private repository: RestaurantRepository;\n\n constructor(repository: RestaurantRepository) {\n super(new RestaurantInitialState());\n\n this.repository = repository;\n }\n\n async *mapEventToState(\n event: RestaurantEvent\n ): AsyncIterableIterator<RestaurantState> {\n yield new RestaurantLoadingState();\n\n if (event instanceof RestaurantCreateEvent) {\n yield* this.create(event);\n } else if (event instanceof RestaurantGetEvent) {\n yield* this.get(event);\n } else if (event instanceof RestaurantSetEvent) {\n yield* this.set(event);\n } else if (event instanceof RestaurantListEvent) {\n yield* this.list(event);\n }\n }\n\n async *create(event: RestaurantCreateEvent) {\n try {\n await this.repository.set(event.restaurant);\n\n yield new RestaurantCreateState();\n } catch (e) {\n yield new RestaurantErrorState();\n }\n }\n\n async *get(event: RestaurantGetEvent) {\n try {\n const restaurant = await this.repository.get(event.id);\n\n yield new RestaurantGetState(restaurant);\n } catch (e) {\n yield new RestaurantErrorState();\n }\n }\n\n async *set(event: RestaurantSetEvent) {\n try {\n const originalRestaurant = await this.repository.get(event.id);\n const restaurant: Restaurant = {\n ...originalRestaurant,\n ...event.restaurant,\n };\n\n await this.repository.set(restaurant);\n\n yield new RestaurantSetState(restaurant);\n } catch (e) {\n yield new RestaurantErrorState();\n 
}\n }\n\n async *list(event: RestaurantListEvent) {\n try {\n const restaurants = await this.repository.list();\n\n yield new RestaurantListState(restaurants);\n } catch (e) {\n yield new RestaurantErrorState();\n }\n }\n}\n"
},
{
"alpha_fraction": 0.6589018106460571,
"alphanum_fraction": 0.6589018106460571,
"avg_line_length": 15.243243217468262,
"blob_id": "9510c0d646d1454978632244c6ab071ff92372d2",
"content_id": "14bedb87b22a217b52ee3358274a6995e075f767",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 601,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 37,
"path": "/src/blocs/sauce/event.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Sauce } from \"../../models\";\n\nexport abstract class SauceEvent {}\n\nexport class SauceCreateEvent extends SauceEvent {\n sauce: Sauce;\n\n constructor(sauce: Sauce) {\n super();\n\n this.sauce = sauce;\n }\n}\n\nexport class SauceGetEvent extends SauceEvent {\n id: string;\n\n constructor(id: string) {\n super();\n\n this.id = id;\n }\n}\n\nexport class SauceSetEvent extends SauceEvent {\n id: string;\n sauce: Partial<Sauce>;\n\n constructor(id: string, sauce: Partial<Sauce>) {\n super();\n\n this.id = id;\n this.sauce = sauce;\n }\n}\n\nexport class SauceListEvent extends SauceEvent {}\n"
},
{
"alpha_fraction": 0.6470588445663452,
"alphanum_fraction": 0.6470588445663452,
"avg_line_length": 15.05555534362793,
"blob_id": "8cd23a3c8c8d0b7a675f0643a0535ae35b46d7e1",
"content_id": "b0b26c3bd1c75c62ea0bdb78c40c647b87f87070",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 578,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 36,
"path": "/src/blocs/card/event.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Card } from \"../../models\";\n\nexport abstract class CardEvent {}\n\nexport class CardCreateEvent extends CardEvent {\n card: Card;\n\n constructor(card: Card) {\n super();\n\n this.card = card;\n }\n}\n\nexport class CardGetEvent extends CardEvent {\n id: string;\n\n constructor(id: string) {\n super();\n this.id = id;\n }\n}\n\nexport class CardSetEvent extends CardEvent {\n id: string;\n card: Partial<Card>;\n\n constructor(id: string, card: Partial<Card>) {\n super();\n\n this.id = id;\n this.card = card;\n }\n}\n\nexport class CardListEvent extends CardEvent {}\n"
},
{
"alpha_fraction": 0.5516749620437622,
"alphanum_fraction": 0.5661974549293518,
"avg_line_length": 27.77948760986328,
"blob_id": "8e0b3fef9d94a2aae5cbab18cc16c1474ef4ac47",
"content_id": "63f898e4cbabbe9f7a0710ac2b57e8eae0b1e634",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11224,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 390,
"path": "/tools/generate.py",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\nimport json\nimport os\nimport random\nimport uuid\nfrom datetime import datetime\n\n\ndef get_profile_names():\n file = open(\"./tools/samples/profile-name.txt\")\n lines = file.readlines()\n file.close()\n\n lines = [line.replace('\\n', '') for line in lines]\n\n return lines\n\n\ndef get_restaurant_names():\n file = open(\"./tools/samples/restaurant-name.txt\")\n lines = file.readlines()\n file.close()\n\n lines = [line.replace('\\n', '') for line in lines]\n\n return lines\n\n\ndef get_street_names():\n file = open(\"./tools/samples/street-name.txt\")\n lines = file.readlines()\n file.close()\n\n lines = [line.replace('\\n', '') for line in lines]\n\n return lines\n\n\ndef get_postal_codes():\n file = open(\"./tools/samples/postal-code.txt\")\n lines = file.readlines()\n file.close()\n\n lines = [line.replace('\\n', '') for line in lines]\n\n return lines\n\n\ndef save_to_file(data, path):\n with open(path, 'w') as outfile:\n json.dump(data, outfile, indent=4, ensure_ascii=False)\n return\n\n\ndef pictogram(number):\n data = {\"id\": str(uuid.uuid4()),\n \"name\": \"Pictogram \" + str(number),\n \"image\": \"https://source.unsplash.com/random\"}\n\n return data\n\n\ndef pictogram_list(length):\n data = []\n\n for i in range(1, length + 1):\n data.append(pictogram(i))\n\n return data\n\n\ndef ingredient(number, pictogram_data):\n data = {\"id\": str(uuid.uuid4()),\n \"name\": \"Ingredient \" + str(number),\n \"image\": \"https://source.unsplash.com/random\",\n \"allergens\": [],\n \"diets\": []}\n\n for i in range(1, random.randint(0, 5)):\n elem = random.choice(pictogram_data)\n if elem[\"id\"] not in data[\"allergens\"]:\n data[\"allergens\"].append(elem[\"id\"])\n\n for i in range(1, random.randint(2, 6)):\n elem = random.choice(pictogram_data)\n if elem[\"id\"] not in data[\"diets\"]:\n data[\"diets\"].append(elem[\"id\"])\n\n return data\n\n\ndef ingredient_list(length, pictogram_data):\n data = []\n\n for i in range(1, length 
+ 1):\n data.append(ingredient(i, pictogram_data))\n\n return data\n\n\ndef sauce(number, ingredient_data):\n data = {\"id\": str(uuid.uuid4()),\n \"name\": \"Sauce \" + str(number),\n \"ingredients\": []}\n\n for i in range(1, random.randint(2, 4)):\n elem = random.choice(ingredient_data)\n if elem[\"id\"] not in data[\"ingredients\"]:\n data[\"ingredients\"].append(elem[\"id\"])\n\n return data\n\n\ndef sauce_list(length, ingredient_data):\n data = []\n\n for i in range(1, length + 1):\n data.append(sauce(i, ingredient_data))\n\n return data\n\n\ndef dish(number, ingredient_data, sauce_data):\n data = {\"id\": str(uuid.uuid4()),\n \"name\": \"Dish \" + str(number),\n \"type\": random.choice([\"starter\", \"plate\", \"dessert\"]),\n \"description\": \"A generic description\",\n \"price\": str(random.randint(5, 25)),\n \"ingredients\": [],\n \"sauces\": [],\n \"is_adaptable\": random.choice([\"true\", \"false\"])}\n\n for i in range(1, random.randint(2, 6)):\n elem = random.choice(ingredient_data)\n if elem[\"id\"] not in data[\"ingredients\"]:\n data[\"ingredients\"].append(elem[\"id\"])\n\n if random.randint(1, 2) == 1:\n elem = random.choice(sauce_data)\n data[\"sauces\"].append(elem[\"id\"])\n\n return data\n\n\ndef dish_list(length, ingredient_data, sauce_data):\n data = []\n\n for i in range(1, length + 1):\n data.append(dish(i, ingredient_data, sauce_data))\n\n return data\n\n\ndef menu(number, dish_data):\n data = {\"id\": str(uuid.uuid4()),\n \"name\": \"Menu \" + str(number),\n \"price\": str(random.randint(10, 50)),\n \"dishes\": []}\n\n for i in range(1, random.randint(2, 4)):\n elem = random.choice(dish_data)\n if elem[\"id\"] not in data[\"dishes\"]:\n data[\"dishes\"].append(elem[\"id\"])\n\n return data\n\n\ndef menu_list(length, dish_data):\n data = []\n\n for i in range(1, length + 1):\n data.append(menu(i, dish_data))\n\n return data\n\n\ndef card(number, dish_data, menu_data):\n data = {\"id\": str(uuid.uuid4()),\n \"name\": \"Card \" + 
str(number),\n \"dishes\": [],\n \"menus\": []}\n\n for i in range(1, random.randint(5, 10)):\n elem = random.choice(dish_data)\n if elem[\"id\"] not in data[\"dishes\"]:\n data[\"dishes\"].append(elem[\"id\"])\n\n for i in range(1, random.randint(2, 6)):\n elem = random.choice(menu_data)\n if elem[\"id\"] not in data[\"menus\"]:\n data[\"menus\"].append(elem[\"id\"])\n\n return data\n\n\ndef card_list(length, dish_data, menu_data):\n data = []\n\n for i in range(1, length + 1):\n data.append(card(i, dish_data, menu_data))\n\n return data\n\n\ndef restaurant(restaurant_name, street_name, postal_code, card_data, dish_data):\n longitude_float = random.randint(41000, 47000)\n latitude_float = random.randint(57000, 60000)\n\n data = {\"id\": str(uuid.uuid4()),\n \"name\": restaurant_name,\n \"description\": \"A generic description\",\n \"image\": \"https://source.unsplash.com/random\",\n \"average_rate\": str(random.randint(0, 6)),\n \"average_price\": str(random.randint(5, 30)),\n \"address\": {\n \"street_number\": str(random.randint(1, 200)),\n \"street\": street_name,\n \"postal_code\": postal_code,\n \"city\": \"Toulouse\",\n \"country\": \"France\",\n },\n \"location\": {\n \"latitude\": \"43.{}\".format(latitude_float),\n \"longitude\": \"1.{}\".format(longitude_float),\n },\n \"phone\": \"01 02 03 04 05\",\n \"url\": \"https://example.com\",\n \"opening_time\": [[], [], [], [], [], [], []],\n \"current_card\": \"\",\n \"cards\": [],\n \"dishes\": []}\n\n def generate_hour():\n hour = str(random.randint(0, 23))\n\n if len(hour) == 1:\n return \"0\" + hour\n else:\n return hour\n\n def generate_min():\n minutes = str(random.randint(0, 59))\n\n if len(minutes) == 1:\n return \"0\" + minutes\n else:\n return minutes\n\n for i in range(0, 7):\n from_hour = generate_hour() + \":\" + generate_min()\n to_hour = generate_hour() + \":\" + generate_min()\n\n if random.randint(0, 7) == 0:\n continue\n\n if int(from_hour.split(\":\")[0]) < int(to_hour.split(\":\")[0]):\n 
data[\"opening_time\"][i].append({\"from\": from_hour, \"to\": to_hour})\n elif int(from_hour.split(\":\")[0]) > int(to_hour.split(\":\")[0]):\n data[\"opening_time\"][i].append({\"from\": to_hour, \"to\": from_hour})\n else:\n if int(from_hour.split(\":\")[1]) < int(to_hour.split(\":\")[1]):\n data[\"opening_time\"][i].append({\"from\": from_hour, \"to\": to_hour})\n elif int(from_hour.split(\":\")[1]) > int(to_hour.split(\":\")[1]):\n data[\"opening_time\"][i].append({\"from\": to_hour, \"to\": from_hour})\n\n for i in range(1, random.randint(2, 6)):\n elem = random.choice(card_data)\n if elem[\"id\"] not in data[\"cards\"]:\n data[\"cards\"].append(elem[\"id\"])\n\n for i in range(1, random.randint(10, 50)):\n elem = random.choice(dish_data)\n if elem[\"id\"] not in data[\"dishes\"]:\n data[\"dishes\"].append(elem[\"id\"])\n\n data[\"current_card\"] = random.choice(data[\"cards\"])\n\n return data\n\n\ndef restaurant_list(length, restaurant_names, street_names, postal_codes, card_data, dish_data):\n data = []\n\n for i in range(1, length + 1):\n restaurant_name = random.choice(restaurant_names)\n street_name = random.choice(street_names)\n postal_code = random.choice(postal_codes)\n\n restaurant_names.remove(restaurant_name)\n street_names.remove(street_name)\n\n data.append(restaurant(restaurant_name, street_name, postal_code, card_data, dish_data))\n\n return data\n\n\ndef profile(name, restaurant_data):\n first_name, last_name = name.split(\" \")\n\n data = {\"id\": str(uuid.uuid4()),\n \"email\": first_name.lower() + \".\" + last_name.lower() + \"@example.com\",\n \"firstName\": first_name,\n \"lastName\": last_name,\n \"restaurant\": random.choice(restaurant_data)[\"id\"]}\n\n return data\n\n\ndef profile_list(length, profile_names, restaurant_data):\n data = []\n\n for i in range(1, length + 1):\n name = random.choice(profile_names)\n\n profile_names.remove(name)\n\n data.append(profile(name, restaurant_data))\n\n return data\n\n\ndef main():\n 
random.seed(datetime.now())\n\n profile_names = get_profile_names()\n restaurant_names = get_restaurant_names()\n street_names = get_street_names()\n postal_codes = get_postal_codes()\n\n if os.path.exists(\"./data/pictograms/initial-data.json\"):\n file = open(\"./data/pictograms/initial-data.json\")\n pictogram_data = json.load(file)\n file.close()\n else:\n pictogram_data = pictogram_list(random.randint(5, 100))\n save_to_file(pictogram_data, \"./data/pictograms/data.json\")\n\n if os.path.exists(\"./data/ingredients/initial-data.json\"):\n file = open(\"./data/ingredients/initial-data.json\")\n ingredient_data = json.load(file)\n file.close()\n else:\n ingredient_data = ingredient_list(random.randint(5, 100), pictogram_data)\n save_to_file(ingredient_data, \"./data/ingredients/data.json\")\n\n if os.path.exists(\"./data/sauces/initial-data.json\"):\n file = open(\"./data/sauces/initial-data.json\")\n sauce_data = json.load(file)\n file.close()\n else:\n sauce_data = sauce_list(random.randint(5, 100), ingredient_data)\n save_to_file(sauce_data, \"./data/sauces/data.json\")\n\n if os.path.exists(\"./data/dishes/initial-data.json\"):\n file = open(\"./data/dishes/initial-data.json\")\n dish_data = json.load(file)\n file.close()\n else:\n dish_data = dish_list(random.randint(5, 100), ingredient_data, sauce_data)\n save_to_file(dish_data, \"./data/dishes/data.json\")\n\n if os.path.exists(\"./data/menus/initial-data.json\"):\n file = open(\"./data/menus/initial-data.json\")\n menu_data = json.load(file)\n file.close()\n else:\n menu_data = menu_list(random.randint(5, 100), dish_data)\n save_to_file(menu_data, \"./data/menus/data.json\")\n\n if os.path.exists(\"./data/cards/initial-data.json\"):\n file = open(\"./data/cards/initial-data.json\")\n card_data = json.load(file)\n file.close()\n else:\n card_data = card_list(random.randint(5, 100), dish_data, menu_data)\n save_to_file(card_data, \"./data/cards/data.json\")\n\n restaurant_data = 
restaurant_list(random.randint(5, 100), restaurant_names, street_names, postal_codes, card_data,\n dish_data)\n save_to_file(restaurant_data, \"./data/restaurants/data.json\")\n\n profile_data = profile_list(random.randint(5, 100), profile_names, restaurant_data)\n save_to_file(profile_data, \"./data/profiles/data.json\")\n\n return\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7126760482788086,
"alphanum_fraction": 0.7126760482788086,
"avg_line_length": 18.72222137451172,
"blob_id": "9a39528f297a3c1c7022e9e3c596d348efaab79d",
"content_id": "a2e0d3ef6eb38f473bd27cc6334ade6dc135731c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 710,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 36,
"path": "/src/blocs/restaurant/event.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Restaurant } from \"../../models\";\n\nexport abstract class RestaurantEvent {}\n\nexport class RestaurantCreateEvent extends RestaurantEvent {\n restaurant: Restaurant;\n\n constructor(restaurant: Restaurant) {\n super();\n\n this.restaurant = restaurant;\n }\n}\n\nexport class RestaurantGetEvent extends RestaurantEvent {\n id: string;\n\n constructor(id: string) {\n super();\n this.id = id;\n }\n}\n\nexport class RestaurantSetEvent extends RestaurantEvent {\n id: string;\n restaurant: Partial<Restaurant>;\n\n constructor(id: string, restaurant: Partial<Restaurant>) {\n super();\n\n this.id = id;\n this.restaurant = restaurant;\n }\n}\n\nexport class RestaurantListEvent extends RestaurantEvent {}\n"
},
{
"alpha_fraction": 0.6384152173995972,
"alphanum_fraction": 0.6384152173995972,
"avg_line_length": 21.155555725097656,
"blob_id": "d4489b3dd0aac44e129e6e60debf02fa52f8c2a3",
"content_id": "14bcbdd8bc3d9d140ea32696739372c56d755029",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 1994,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 90,
"path": "/src/blocs/dish/bloc.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Bloc } from \"@felangel/bloc\";\nimport {\n DishCreateEvent,\n DishEvent,\n DishGetEvent,\n DishListEvent,\n DishSetEvent,\n} from \"./event\";\nimport {\n DishCreateState,\n DishErrorState,\n DishGetState,\n DishInitialState,\n DishListState,\n DishLoadingState,\n DishSetState,\n DishState,\n} from \"./state\";\nimport { DishRepository } from \"../../repositories\";\nimport { Dish } from \"../../models\";\n\nexport class DishBloc extends Bloc<DishEvent, DishState> {\n private repository: DishRepository;\n\n constructor(repository: DishRepository) {\n super(new DishInitialState());\n\n this.repository = repository;\n }\n\n async *mapEventToState(event: DishEvent): AsyncIterableIterator<DishState> {\n yield new DishLoadingState();\n\n if (event instanceof DishCreateEvent) {\n yield* this.create(event);\n } else if (event instanceof DishGetEvent) {\n yield* this.get(event);\n } else if (event instanceof DishSetEvent) {\n yield* this.set(event);\n } else if (event instanceof DishListEvent) {\n yield* this.list(event);\n }\n }\n\n async *create(event: DishCreateEvent) {\n try {\n await this.repository.set(event.dish);\n\n yield new DishCreateState();\n } catch (e) {\n yield new DishErrorState();\n }\n }\n\n async *get(event: DishGetEvent) {\n try {\n const dish = await this.repository.get(event.id);\n\n yield new DishGetState(dish);\n } catch (e) {\n yield new DishErrorState();\n }\n }\n\n async *set(event: DishSetEvent) {\n try {\n const originalDish = await this.repository.get(event.id);\n const dish: Dish = {\n ...originalDish,\n ...event.dish,\n };\n\n await this.repository.set(dish);\n\n yield new DishSetState(dish);\n } catch (e) {\n yield new DishErrorState();\n }\n }\n\n async *list(event: DishListEvent) {\n try {\n const dishes = await this.repository.list();\n\n yield new DishListState(dishes);\n } catch (e) {\n yield new DishErrorState();\n }\n }\n}\n"
},
{
"alpha_fraction": 0.6470588445663452,
"alphanum_fraction": 0.6470588445663452,
"avg_line_length": 15.05555534362793,
"blob_id": "7858e9af09b09267f09a0ac009f711afafe8cc6c",
"content_id": "cab5002db062f335cb4a1b3f629cc069d78a50b6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TypeScript",
"length_bytes": 578,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 36,
"path": "/src/blocs/menu/event.ts",
"repo_name": "ShokuApp/professional-forward",
"src_encoding": "UTF-8",
"text": "import { Menu } from \"../../models\";\n\nexport abstract class MenuEvent {}\n\nexport class MenuCreateEvent extends MenuEvent {\n menu: Menu;\n\n constructor(menu: Menu) {\n super();\n\n this.menu = menu;\n }\n}\n\nexport class MenuGetEvent extends MenuEvent {\n id: string;\n\n constructor(id: string) {\n super();\n this.id = id;\n }\n}\n\nexport class MenuSetEvent extends MenuEvent {\n id: string;\n menu: Partial<Menu>;\n\n constructor(id: string, menu: Partial<Menu>) {\n super();\n\n this.id = id;\n this.menu = menu;\n }\n}\n\nexport class MenuListEvent extends MenuEvent {}\n"
}
] | 14 |
rtvasu/ant-colony-tsp | https://github.com/rtvasu/ant-colony-tsp | 1e7edc1f015a4daff6a4949cdad532652e028373 | 0b2a200597389d38c7f852d2e1eb6cf1a2b15fdb | 04266f05766030bf0468b687a42cc2253cd0f151 | refs/heads/main | 2023-08-06T09:00:17.705300 | 2021-07-25T21:50:17 | 2021-07-25T21:50:17 | 389,446,166 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5859728455543518,
"alphanum_fraction": 0.622171938419342,
"avg_line_length": 47.11111068725586,
"blob_id": "2346dbcb8d0c51df834e750ab0498a35857f82ab",
"content_id": "26e6f6b4ab937ae389936cd7b09157f86be7ee78",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 442,
"license_type": "no_license",
"max_line_length": 149,
"num_lines": 9,
"path": "/README.md",
"repo_name": "rtvasu/ant-colony-tsp",
"src_encoding": "UTF-8",
"text": "# Index #\r\n- aco.py...............Problem 3/Question 2/Part 1, 2\r\n- aco-a.py...........Problem 3/Question 2/Part 3a\r\n- aco-b.py...........Problem 3/Question 2/Part 3b\r\n- aco-c.py...........Problem 3/Question 2/Part 3c\r\n- aco-d.py...........Problem 3/Question 2/Part 3d\r\n\r\n# How to Run It #\r\nRun the python files using py -m filename.py, and if that doesn't work, you can try location-of-python-exec/python.exe \"location-of-file/filename.py\"\r\n"
},
{
"alpha_fraction": 0.5430908203125,
"alphanum_fraction": 0.5556720495223999,
"avg_line_length": 34.70000076293945,
"blob_id": "02d34abdd22f45fb795bac8f11e788ead3af451e",
"content_id": "b872fc3d451cf5d165ce25e4f6bbc34fd142bdb8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4769,
"license_type": "no_license",
"max_line_length": 156,
"num_lines": 130,
"path": "/aco-a.py",
"repo_name": "rtvasu/ant-colony-tsp",
"src_encoding": "UTF-8",
"text": "from parameters import *\r\nimport numpy as np\r\n\r\ndef euclidean(c1, c2):\r\n return pow(pow(c1[0] - c2[0], 2) + pow(c1[1] - c2[1], 2), 0.5)\r\n\r\ndef parse(filename):\r\n with open(filename) as f:\r\n content = f.readlines()\r\n\r\n for line in content:\r\n l = line.strip().split()\r\n content[int(l[0]) - 1] = (int(l[1]), int(l[2]))\r\n return content\r\n\r\ndef distance(list):\r\n distance = [[0 for i in range(len(list))] for j in range(len(list))]\r\n for i in range(len(list)):\r\n for j in range(len(list)):\r\n if (i == j):\r\n distance[i][j] = 0\r\n elif (i < j):\r\n distance[i][j] = euclidean(list[i], list[j])\r\n else:\r\n distance[i][j] = distance[j][i]\r\n return distance\r\n\r\ndef placeInitPheromone():\r\n pheromone = [[0 for i in range(numCities)] for j in range(numCities)]\r\n for i in range(len(pheromone)):\r\n for j in range(len(pheromone)):\r\n if (i == j):\r\n pheromone[i][j] = 0\r\n elif (i < j):\r\n pheromone[i][j] = initPheromoneAmt\r\n else:\r\n pheromone[i][j] = pheromone[j][i]\r\n return pheromone\r\n\r\ndef assignCities():\r\n antLocations = [-1 for i in range(numAnts)]\r\n cityPerGroup = np.random.permutation(numCities)\r\n for i in range(numAnts):\r\n antLocations[i] = cityPerGroup[i%numCities]\r\n return antLocations\r\n\r\ndef pseudorandomProportionalRule(path):\r\n probOfSelection = [0 for i in range(numCities)]\r\n src = path[0]\r\n for j in range(numCities):\r\n if (j in path):\r\n probOfSelection[j] = 0\r\n else:\r\n probOfSelection[j] = pheromoneAmounts[src][j]/pow(distances[src][j], beta)\r\n return probOfSelection\r\n\r\ndef antSystemTransitionRule(path):\r\n probOfSelection = [0 for i in range(numCities)]\r\n # find denominator\r\n src = path[-1]\r\n den = 0\r\n for dest in range(numCities):\r\n if (src == dest):\r\n continue\r\n den += pow(pheromoneAmounts[src][dest], alpha)/pow(distances[src][dest], beta)\r\n\r\n for j in range(numCities):\r\n if (j in path):\r\n probOfSelection[j] = 0\r\n else:\r\n 
probOfSelection[j] = pow(pheromoneAmounts[src][j], alpha)/pow(distances[src][j], beta)\r\n if (den == 0):\r\n probOfSelection[j] = pow(10, 100)\r\n else:\r\n probOfSelection[j] /= den\r\n return probOfSelection\r\n\r\ndef evaporate():\r\n for i in range(numCities):\r\n for j in range(numCities):\r\n pheromoneAmounts[i][j] = (1 - evapRate)*pheromoneAmounts[i][j]\r\n\r\ndef offlinePheromoneUpdate(length, path):\r\n for i in range(numCities):\r\n for j in range(numCities):\r\n if (i < j):\r\n for k in range(len(path) - 1):\r\n if (path[k] == i and path[k + 1] == j) or (path[k] == j and path[k + 1] == i):\r\n pheromoneAmounts[i][j] = ((1 - evapRate)*pheromoneAmounts[i][j]) + (evapRate/length)\r\n else:\r\n pheromoneAmounts[i][j] = ((1 - evapRate)*pheromoneAmounts[i][j])\r\n elif (i > j):\r\n pheromoneAmounts[i][j] = pheromoneAmounts[j][i]\r\n\r\ndef forage():\r\n paths = [[antLocations[i]] for i in range(numAnts)]\r\n pathLengths = [0 for i in range(numAnts)]\r\n for i in range(maxIter):\r\n q = np.random.rand()\r\n for j in range(numAnts):\r\n probOfSelection = []\r\n if (q <= q0):\r\n probOfSelection = pseudorandomProportionalRule(paths[j])\r\n else:\r\n probOfSelection = antSystemTransitionRule(paths[j])\r\n evaporate()\r\n maximum = max(probOfSelection)\r\n if (maximum != 0):\r\n paths[j].append(probOfSelection.index(max(probOfSelection)))\r\n pathLengths[j] += distances[paths[j][-2]][paths[j][-1]]\r\n pheromoneAmounts[paths[j][0]][paths[j][-1]] = ((1 - decayCoeff)*pheromoneAmounts[paths[j][0]][paths[j][-1]]) + (decayCoeff*initPheromoneAmt)\r\n pheromoneAmounts[paths[j][-1]][paths[j][0]] = pheromoneAmounts[paths[j][0]][paths[j][-1]]\r\n bestAnt = pathLengths.index(min(pathLengths))\r\n offlinePheromoneUpdate(pathLengths[bestAnt], paths[bestAnt])\r\n return min(pathLengths), paths[pathLengths.index(min(pathLengths))]\r\n\r\ncoordinates = parse(filename)\r\ndistances = distance(coordinates)\r\n\r\nfor i in range(3):\r\n numAnts = numAntsList[0]\r\n alpha = 
alphaList[0]\r\n beta = betaList[0]\r\n decayCoeff = decayCoeffList[i]\r\n\r\n pheromoneAmounts = placeInitPheromone()\r\n antLocations = assignCities()\r\n bestPathLength, bestPath = forage()\r\n\r\n print(round(bestPathLength), bestPath, len(bestPath))"
},
{
"alpha_fraction": 0.46226415038108826,
"alphanum_fraction": 0.6273584961891174,
"avg_line_length": 19.399999618530273,
"blob_id": "01da17cfac07103037fbf15362b54131bbd86a26",
"content_id": "2a66c375eab48f8f18ecc2d2d1550f0542b0e73e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 212,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 10,
"path": "/parameters.py",
"repo_name": "rtvasu/ant-colony-tsp",
"src_encoding": "UTF-8",
"text": "filename = 'bays29.tsp'\r\nnumAntsList = [30, 125]\r\ninitPheromoneAmt = 0.0001\r\nnumCities = 29\r\nalphaList = [9, 5, 2]\r\nbetaList = [12, 2, 1]\r\nmaxIter = 500\r\nevapRate = 0.2\r\nq0 = 0.6\r\ndecayCoeffList = [0.2, 0.4, 0.6]"
}
] | 3 |
xiongm/spark-wiki-test | https://github.com/xiongm/spark-wiki-test | a82d84158322359523d4e2fc1491e646f363f503 | b987a05b5ca17c5701df31e55f08724d1add1850 | e9b9e888f74443536596fc244e6f3a2c78202049 | refs/heads/master | 2020-12-02T18:09:25.611760 | 2017-06-27T19:36:37 | 2017-06-27T19:36:37 | 96,484,318 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7647058963775635,
"alphanum_fraction": 0.7647058963775635,
"avg_line_length": 17,
"blob_id": "8bdeeab9c5eb6e37697c6d20e7cf874ac9da290b",
"content_id": "3cdba84a2daf814f5c4d35d518982786f8278c1a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 17,
"license_type": "permissive",
"max_line_length": 17,
"num_lines": 1,
"path": "/README.md",
"repo_name": "xiongm/spark-wiki-test",
"src_encoding": "UTF-8",
"text": "# spark-wiki-test"
},
{
"alpha_fraction": 0.625525951385498,
"alphanum_fraction": 0.6381486654281616,
"avg_line_length": 32.88888931274414,
"blob_id": "4c64a54ac0acb2ad2d1b138dbdc0da9d5a3116cc",
"content_id": "9627af2fb56bca7748e3547ca4aa2a8fdaf00b8b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2139,
"license_type": "permissive",
"max_line_length": 128,
"num_lines": 63,
"path": "/wiki.py",
"repo_name": "xiongm/spark-wiki-test",
"src_encoding": "UTF-8",
"text": "# experimental spark scripts to analyse wiki page view data\n# based on\n# https://www.percona.com/blog/2015/10/07/using-apache-spark-mysql-data-analysis/\n# analyse wiki history page view data\n# to run:\n# ~/spark/bin/spark-submit ./wiki.py 2016 8 8\n\nfrom __future__ import print_function\n\nimport re\nimport sys\nimport os\nfrom operator import add\n\nfrom pyspark.sql import SparkSession, Row\nfrom datetime import timedelta, date\n\nimport urllib\n\ndef load_day(spark, file_name, my_date):\n sc = spark.sparkContext\n # lines = spark.read.text(file_name)\n lines = sc.textFile(file_name)\n\n parts = lines.map(lambda l : l.split(\" \")).filter(lambda l : l[0] == 'en').cache()\n print(parts)\n wiki = parts.map(lambda p : Row(project = p[0], url = urllib.unquote(p[1]).lower(), requests = int(p[2]), size = int(p[3])))\n df_wiki = spark.createDataFrame(wiki)\n df_wiki.createOrReplaceTempView('wikistats')\n\n df_group = spark.sql(\"select '\" + my_date + \"' as my_date, url, sum(requests) as clicks from wikistats group by url \")\n df_group.write.mode('overwrite').parquet('parquet/my_date=' + my_date)\n\nif __name__ == \"__main__\":\n if len(sys.argv) != 4:\n print(\"Usage: wiki.py <year> <from_month> <to_month>\", file=sys.stderr)\n exit(-1)\n\n print(\"WARN: This is a simple analysis of wiki page view data\\n\", file=sys.stderr)\n\n # Initialize the spark context.\n spark = SparkSession\\\n .builder\\\n .appName(\"WikiPageViews\")\\\n .getOrCreate()\n year = int(sys.argv[1])\n from_month = int(sys.argv[2])\n to_month = int(sys.argv[3])\n\n start_date = date(year, from_month, 1)\n end_date = date(year, to_month, 2)\n delta = timedelta(days=1)\n\n base_url = 'https://dumps.wikimedia.org/other/pagecounts-raw/'\n curr_date = start_date\n\n dest_path = os.path.join('.', 'datasets')\n while curr_date < end_date:\n print(\"Processing \",curr_date.strftime(\"%Y-%m-%d\"))\n file_name = dest_path + '/pagecounts-' + curr_date.strftime(\"%Y%m%d\") + '-*.gz'\n 
print(file_name)\n load_day(spark, file_name, curr_date.strftime(\"%Y-%m-%d\"))\n curr_date += delta\n\n\n\n\n"
},
{
"alpha_fraction": 0.5789473652839661,
"alphanum_fraction": 0.5995065569877625,
"avg_line_length": 32.72222137451172,
"blob_id": "7d3cc3baed6694ef7d35a88f4c24cae21bd3e829",
"content_id": "da08066a5d335c657f39a02b7791de786519de94",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1216,
"license_type": "permissive",
"max_line_length": 114,
"num_lines": 36,
"path": "/download.py",
"repo_name": "xiongm/spark-wiki-test",
"src_encoding": "UTF-8",
"text": "# python ./download.py 2016 08 09\n# will download whole August's data\n\nfrom __future__ import print_function\n\nimport os\nimport sys\nimport urllib\nfrom datetime import timedelta, date\n\nif __name__ == \"__main__\":\n if len(sys.argv) != 4:\n print(\"Usage: pagerank <year> <from_month> <to_month>\", file=sys.stderr)\n exit(-1)\n\n print(\"WARN: This is a simple analysis of wiki page view data\\n\", file=sys.stderr)\n\n year = sys.argv[1]\n from_month = sys.argv[2]\n to_month = sys.argv[3]\n\n start_date = date(int(year), int(from_month), 1)\n end_date = date(int(year), int(end_month), 1)\n delta = timedelta(days=1)\n\n base_url = 'https://dumps.wikimedia.org/other/pagecounts-raw/'\n curr_date = start_date\n\n dest_path = os.path.join('.', 'datasets')\n while curr_date < end_date:\n print(\"Downloading \",curr_date.strftime(\"%Y-%m-%d\"))\n url = base_url + year + '/' + year + '-' + curr_date.strftime(\"%m\") + '/'\n for i in range(0,24):\n file_name = 'pagecounts-' + curr_date.strftime(\"%Y%m%d\") + '-%(number)02d' % {\"number\": i} + '0000.gz'\n f = urllib.urlretrieve(url + file_name, os.path.join(dest_path, file_name))\n curr_date += delta\n\n\n"
}
] | 3 |
Kyle44/Driving | https://github.com/Kyle44/Driving | fe36df7569869f280944f54ff603d6ce37e4d56a | 390fa06c8b0128fe90891f9adaada0fd34dd2f81 | 57c4e81ff1915decf486f2b35cdba05e23d29e61 | refs/heads/master | 2020-07-16T09:44:34.252592 | 2015-03-28T01:13:48 | 2015-03-28T01:13:48 | 31,236,192 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.646792471408844,
"alphanum_fraction": 0.6581131815910339,
"avg_line_length": 46.32143020629883,
"blob_id": "b41e4c181902b4ee63f286a70eb4b65fcadc9435",
"content_id": "5f543b9bd507898c6e4ff8ff200e0c3aabfee107",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1325,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 28,
"path": "/hw1.py",
"repo_name": "Kyle44/Driving",
"src_encoding": "UTF-8",
"text": "# File: hw1.py\n# Written by: Kyle Fritz\n# Date: 9/13/2013\n# Lab Section: 10\n# Description: This program calculates the total miles driven, the mileage in\n# miles per gallon, and the dollar cost per mile.\n############### Use in python 3 ###################\n\n# def main() calculates the total miles driven by subtracting the initial\n# number of miles from the final number of miles. Then, it calculates\n# the mileage by dividing the total miles driven by the number of gallons\n# used. Finally, it calculates the cost per mile driven.\nprint(\"This program calculates how many miles you drove, the MPG, and the cost per mile to operate your vehicle.\")\ndef main():\n\n initial = eval(input(\"Enter your initial odometer reading: \"))\n final = eval(input(\"Enter your final odometer reading: \"))\n gallons = eval(input(\"Enter the number of gallons used: \"))\n cost = eval(input(\"Enter the cost per gallon: \"))\n tot = final - initial # This is the total miles driven.\n mileage = tot / gallons # This is the miles per gallon\n cost2 = cost / mileage # This is the cost per mile.\n print(\"You drove\", tot , \"miles.\")\n print(\"The mileage is\" ,mileage, \".\")\n print(round(cost2,2), \"is the cost per mile.\") # This rounds the cost per\n # mile to 2 decimal places.\n\nmain()\n"
},
{
"alpha_fraction": 0.7903226017951965,
"alphanum_fraction": 0.7903226017951965,
"avg_line_length": 61,
"blob_id": "5264c1269707ec304cd28b972e7343f2e5310c5c",
"content_id": "4d3bf03dd6f473b56d6e2c39b70270d4e70842c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 124,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 2,
"path": "/README.md",
"repo_name": "Kyle44/Driving",
"src_encoding": "UTF-8",
"text": "# Driving.py\nThis program calculates the total miles driven, the mileage in miles per gallon, and the dollar cost per mile.\n"
}
] | 2 |
chadmiller/orange-county-jail-booking-demographics | https://github.com/chadmiller/orange-county-jail-booking-demographics | 48c7025c6965a6676bbfde99d4dca5099f643234 | 009eae99e90b037d80af9c0d6a7db5d8df135471 | a5ad7ad267b5f272fd957b06a63c0783522a17ce | refs/heads/master | 2020-06-30T19:35:45.871834 | 2019-08-06T21:57:48 | 2019-08-06T21:57:48 | 200,929,776 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.523262083530426,
"alphanum_fraction": 0.5604454874992371,
"avg_line_length": 41.15909194946289,
"blob_id": "0c950c3c4eb744fe460d651b8deea964a6a6ff0f",
"content_id": "ac3d8b0eb9d467b5334df961644cf5d3045373c6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5567,
"license_type": "no_license",
"max_line_length": 419,
"num_lines": 132,
"path": "/upload-ocj-booking-dem",
"repo_name": "chadmiller/orange-county-jail-booking-demographics",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n# vi: filetype=python :\n\"\"\":\"\nif test \"$#\" -gt 0; then cat $1; else wget -q -O - ftp://ftp.ocfl.net/divisions/corrections/pub/bookings.pdf ; fi |pdftotext -layout -nopgbrk -x 0 -y 65 -W 10000 -H 700 - - |chronic python3 \"$0\" || wget -q ftp://ftp.ocfl.net/divisions/corrections/pub/bookings.pdf\nexit 0\n\n# That above is some crazy magic Chad made to run some shell stuff and python\n# stuff in the same file. Do not be alarmed.\n\nchronic is in \"moreutils\" Ubuntu/Debian package\npdftotext is in \"poppler-utils\"\n\"\"\"\n\n\nimport sys\nimport re\nimport os\nimport datetime\nimport requests\n\nu, p = os.environ[\"u\"], os.environ[\"p\"]\ntok = \"VuUWUpMu4pPnYES285t3R6WqK\"\n\nday = None\nbookings = []\nnext_is_addr_and_eth = False\nchecked = False\nfor line in sys.stdin:\n line = line.rstrip()\n print(line)\n\n if not line: continue\n if line.endswith(' Race/'): continue\n if line.endswith(' Gender/'): continue\n if line.startswith(\"Name \") and line.endswith('Release Date/Time'): continue\n if line.endswith(\" / DEGREE\"): continue\n\n # ' AP6-2 MISDEMEANOR / SECOND DEGREE ALCOHOL-POSSESS OR CONSUME IN PUBLIC'\n # ' 784.045(1)(a)(2)-14 FELONY / SECOND DEGREE AGGRAVATED BATTERY WITH A FIREARM'\n if re.match(r\"\"\"\n ^\\s{2,3}\n (?:\\d|PLEA|CITY|CNTY|OC|WP|AP)(?:\\.|\\d[A-Z]?|\\([A-Z]\\)|\\([a-z]\\)|\\([0-9]{1,3}\\)|-)+\n \\s+\n (?:FELONY|MISDEMEANOR)\\s/\\s(CAPITOL|LIFE|FIRST|SECOND|THIRD|\\s{3,10})?\\s?DEGREE\"\"\", line, re.VERBOSE): continue\n\n # ' CASE: DP174'\n # ' CASE: ORLANDO PD'\n m3 = re.match(r\"\\s{2,3}CASE:(?:| \\w.{4,20}|(?: \\w.{6,20}\\s{5,}|\\s{24,})(\\S.*?)\\s*)$\", line)\n if m3:\n if m3.group(1):\n bookings[-1][\"agency\"] = m3.group(1).title()\n continue\n\n if next_is_addr_and_eth:\n ' ORLANDO, FL 32801 NON-HISPANIC'\n m2 = re.match(r\"\"\"\n ^\\s\\s\n .*?\n \\s{3}\n ((?:NON-)?HISPANIC|UNKNOWN)\n (\\s{3,}ZNA)?$\"\"\", line, re.VERBOSE)\n if not m2:\n print(repr(line))\n assert day\n 
bookings.append({\"ID\":\"orangejail/{}/{}\".format(day, int(m1.group(1))), \"reportday\": day, \"bookingid\":int(m1.group(1)), \"race\":m1.group(2).lower(), \"sex\":m1.group(3).lower(), \"ageyears\":int(m1.group(4)), \"ethnicity\":m2.group(1).lower(), \"agency\":None, \"raceethnicity\":\"{} {}\".format({\"b\":\"black\", \"w\":\"white\", \"u\":\"\", \"a\":\"asian\"}[m1.group(2).lower()], m2.group(1).lower()).strip(), \"zna\": not not m2.group(2)})\n m1 = None\n next_is_addr_and_eth = False\n continue\n\n # 'COLONMALDONADO, STEPHANIE MARGARITA'\n # 'DEOLIVEIRARODRIGUES, GEORGIA MOHANA17003257 W/F 39 -- 2/3/2017 8:11:56AM\n # 'GAYDENDOWDELL, TERRY CHRISTOPHER 17002432 W/M 27 -- 1/26/2017 11:03:11PM'\n # \"Q', E 17006089 W/M 0 -- 3/2/2017 10:46:32PM\"\n # \"BAPTISTE, WISLY JEAN 17014464 B/U 52 -- 5/22/2017 9:47:15PM\"\n m1 = re.match(r\"\"\"\n ^(?:\\w|-\\w|'|,|\\s\\b)* (?# LASTNAME, FIRST MIDDLE)\n \\s*\n (\\d{8})\n \\s{3,}\n ([BWUA])/([FMU])\n \\s*\n (\\d?\\d?\\d)\n \\s{2} .*\"\"\", line, re.VERBOSE)\n if m1:\n next_is_addr_and_eth = True\n continue\n\n # ' TOTAL INMATES THIS REPORT: 62'\n mcheck = re.match(\"^ {1,2}TOTAL INMATES THIS REPORT: (\\d+)\", line)\n if mcheck:\n checked = int(mcheck.group(1)) == len(bookings), (len(bookings), line)\n continue\n\n\n m = re.match(\"^\\s{18,}BEGINNING AT MIDNIGHT (.*)\", line)\n if m:\n day = datetime.datetime.strptime(m.group(1), \"%m/%d/%Y\").strftime(\"%Y-%m-%d\")\n continue\n\n raise ValueError(\"Unknown line: \" + repr(line))\n\nassert checked\n\n# Step 1: Send rows with ids.\nresponse = requests.post(\"https://brigades.opendatanetwork.com/resource/tcsm-6pxj.json\", headers={\"X-App-Token\": tok}, auth=(u, p), json=bookings)\nassert response.status_code == 200, (response, response.text)\ndata = response.json()\nassert data[\"Errors\"] == 0, data\nassert (data['Rows Updated'] + data['Rows Created']) > 10, data\n\n# Step 2: Send self as attachment. 
In three substeps.\n# - Upload self as an \"asset\".\nresponse = requests.post(\"https://brigades.opendatanetwork.com/api/assets\", headers={\"X-App-Token\": tok}, auth=(u, p), files={ \"file\": (\"unused field\", open(sys.argv[0], \"rb\"), \"text/x-python\") })\nprint(response)\nasset_upload = response.json()\nprint(asset_upload)\nassert response.status_code == 200, (response, response.text)\nassert \"id\" in asset_upload and asset_upload[\"id\"]\n\n# - Get metadata.\nresponse = requests.get(\"https://brigades.opendatanetwork.com/views/tcsm-6pxj.json\", headers={\"X-App-Token\": tok}, auth=(u, p))\nassert response.status_code == 200, (response, response.text)\nmetadata = response.json()[\"metadata\"]\nattachments = metadata.get(\"attachments\", [])\n\n# - Update metadata to link that asset to the dataset.\nattachments = [a for a in attachments if a[\"name\"] != (u + \"/uploader\")] # remove old copy of script\nattachments.append({'blobId': asset_upload[\"id\"], 'name': u + \"/uploader\", 'filename': u + \"/uploader\"})\nmetadata[\"attachments\"] = attachments\n\nresponse = requests.put(\"https://brigades.opendatanetwork.com/views/tcsm-6pxj.json\", headers={\"X-App-Token\": tok}, auth=(u, p), json={ \"metadata\": metadata })\nassert response.status_code == 200, (response, response.text)\n\n\n"
}
] | 1 |
moazzam3890/100DaysOfCode-Python | https://github.com/moazzam3890/100DaysOfCode-Python | bde359adf73b1f92fd9eeb4b12439f013209af45 | 6294b8b157c8610a980a7a7b089de6f3fc864d6f | 3db3a94e00b0644544f1b17e71f3016f3306ad97 | refs/heads/master | 2023-07-27T13:22:05.058396 | 2021-09-10T07:00:34 | 2021-09-10T07:00:34 | 320,537,662 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5912280678749084,
"alphanum_fraction": 0.6070175170898438,
"avg_line_length": 22.75,
"blob_id": "49fce71e0e19734904a1ea0fc1a79ad6aa23db26",
"content_id": "a21a13445a11908a35c1f0a16e2d3983859e3d3f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 570,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 24,
"path": "/100DaysOfCoding/snake_game/food.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Turtle\nimport random\nSHAPE = \"circle\"\nCOLOR = \"yellow\"\nPOSITION_X = 0\nPOSITION_Y = 270\nF_WIDTH = 0.5\nF_LENGTH = 0.5\n\n\nclass Food(Turtle):\n def __init__(self):\n super().__init__()\n self.shape(SHAPE)\n self.penup()\n self.shapesize(stretch_wid=F_WIDTH, stretch_len=F_LENGTH)\n self.color(COLOR)\n self.speed(0)\n self.refresh()\n\n def refresh(self):\n random_x = random.randint(POSITION_X, POSITION_Y)\n random_y = random.randint(POSITION_X, POSITION_Y)\n self.goto(random_x, random_y)\n"
},
{
"alpha_fraction": 0.4864864945411682,
"alphanum_fraction": 0.5495495200157166,
"avg_line_length": 19.200000762939453,
"blob_id": "8dd51a0e5637c3998c72a6a14b756717390be0e8",
"content_id": "f3f0a24ff04bc5e8251689bbcdd718a72a25a078",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1170,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 55,
"path": "/100DaysOfCoding/Cross-game.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# 🚨 Don't change the code below 👇\nrow1 = [\"⬜️\",\"⬜️\",\"⬜️\"]\nrow2 = [\"⬜️\",\"⬜️\",\"⬜️\"]\nrow3 = [\"⬜️\",\"⬜️\",\"⬜️\"]\nmap = [row1, row2, row3]\nprint(f\"{row1}\\n{row2}\\n{row3}\")\nposition = input(\"Where do you want to put the treasure? \")\n# 🚨 Don't change the code above 👆\n\n#Write your code below this row 👇\n\n# if position == \"11\":\n# row1.pop(0)\n# row1.insert(0, \"X\")\n# elif position == \"12\":\n# row2.pop(0)\n# row2.insert(0, \"X\")\n# elif position ==\"13\":\n# row3.pop(0)\n# row3.insert(0, \"X\")\n# elif position == \"21\":\n# row1.pop(1)\n# row1.insert(1, \"X\")\n# elif position == \"22\":\n# row2.pop(1)\n# row2.insert(1, \"X\")\n# elif position == \"23\":\n# row3.pop(1)\n# row3.insert(1, \"X\")\n# elif position == \"31\":\n# row1.pop(2)\n# row1.insert(2, \"X\")\n# elif position == \"32\":\n# row2.pop(2)\n# row2.insert(2, \"X\")\n# elif position == \"33\":\n# row3.pop(2)\n# row3.insert(2, \"X\")\n# else:\n# print(\"Please enter a valid number\")\n\ncolomn = int(position[0])-1\nrow = int(position[1])-1\n\n\nmap[row][colomn] = \"X\"\n\n\n\n\n\n#Write your code above this row 👆\n\n# 🚨 Don't change the code below 👇\nprint(f\"{row1}\\n{row2}\\n{row3}\")"
},
{
"alpha_fraction": 0.5579557418823242,
"alphanum_fraction": 0.5848261117935181,
"avg_line_length": 29.612903594970703,
"blob_id": "f2e1d158e033ebcc80cf1df6ecea78e4c79dacbd",
"content_id": "b61cf27fd8adb14988cc3ee5b7635ec7e93b4f63",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1898,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 62,
"path": "/100DaysOfCoding/snake_game/snake.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Turtle\nMOVE_DISTANCE = 20\nCOLOR = \"green\"\nSHAPE = \"square\"\n\n\nclass Snake:\n \"\"\"Create a snake object with default shape, color, starting position, parts object.\"\"\"\n\n def __init__(self):\n self.shape = SHAPE\n self.color = COLOR\n self.positions = [(0, 0), (-20, 0), (-40, 0)]\n self.all_snake_parts = []\n self.body_parts_creation()\n\n def body_parts_creation(self):\n for position in self.positions:\n self.add_parts(position)\n\n def add_parts(self, position):\n new_snake_part = Turtle(self.shape)\n new_snake_part.color(self.color)\n new_snake_part.penup()\n new_snake_part.goto(position)\n self.all_snake_parts.append(new_snake_part)\n\n def reset(self):\n for part in self.all_snake_parts:\n part.goto(1000, 1000)\n self.all_snake_parts.clear()\n self.body_parts_creation()\n\n def extend(self):\n self.add_parts(self.all_snake_parts[-1].position())\n\n def move(self):\n for part in range(len(self.all_snake_parts) - 1, 0, -1):\n new_x = self.all_snake_parts[part - 1].xcor()\n new_y = self.all_snake_parts[part - 1].ycor()\n self.all_snake_parts[part].goto(new_x, new_y)\n self.all_snake_parts[0].fd(MOVE_DISTANCE)\n\n def change_attr(self, shape, color):\n self.shape = shape\n self.color = color\n\n def up(self):\n if self.all_snake_parts[0].heading() != 270:\n self.all_snake_parts[0].setheading(90)\n\n def down(self):\n if self.all_snake_parts[0].heading() != 90:\n self.all_snake_parts[0].setheading(270)\n\n def left(self):\n if self.all_snake_parts[0].heading() != 0:\n self.all_snake_parts[0].setheading(180)\n\n def right(self):\n if self.all_snake_parts[0].heading() != 180:\n self.all_snake_parts[0].setheading(0)\n"
},
{
"alpha_fraction": 0.5774891972541809,
"alphanum_fraction": 0.5974025726318359,
"avg_line_length": 19.64285659790039,
"blob_id": "a3625ee88ef8310e420be74225b8aabed3b2697c",
"content_id": "00647f026e2081cdcbba1363e00edbb401384a5c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1155,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 56,
"path": "/100DaysOfCoding/Calculator/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# Calculator:\n\nfrom art import logo\n\n# Addition\ndef add(n1,n2):\n return n1 + n2\n\n# Subtraction\ndef subtract(n1, n2):\n return n1 - n2\n\n# Multiplication\ndef multiply(n1, n2):\n return n1 * n2\n\n# Division\ndef divide(n1, n2):\n return n1 / n2\n\n# Operations of Calculator:\noperations = {\n \"+\": add,\n \"-\": subtract,\n \"*\": multiply,\n \"/\": divide,\n}\n\ndef calculator():\n print(logo)\n\n num1 = float(input(\"Please enter a number: \"))\n\n for symbols in operations:\n print(symbols)\n\n continuity_check = True\n while continuity_check:\n \n operation_to_perform = input(\"Please select an operation: \")\n\n num2 = float(input(\"Please enter next number: \"))\n\n call_to_operation = operations[operation_to_perform]\n answer = call_to_operation(num1, num2)\n\n print(f\"{num1} {operation_to_perform} {num2} = {answer}\")\n\n asking_to_continue = input(f\"Press 'y' to continue operations with {answer} and 'n' to start a new calculation: \")\n\n if asking_to_continue == \"n\":\n calculator()\n \n elif asking_to_continue == \"y\":\n num1 = answer\ncalculator()"
},
{
"alpha_fraction": 0.6442687511444092,
"alphanum_fraction": 0.6561264991760254,
"avg_line_length": 13.05555534362793,
"blob_id": "1231b666116f4e4f3e1748d612afb0a901839bb5",
"content_id": "3792d7552b6d06c4ced1800a84c7331bebd1c05a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 506,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 36,
"path": "/100DaysOfCoding/etch-a-sketch/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Turtle, Screen\n\ntim = Turtle()\nscreen = Screen()\n\n\ndef move_forwards():\n tim.forward(10)\n\n\ndef move_backwards():\n tim.backward(10)\n\n\ndef clockwise():\n tim.right(5)\n\n\ndef anti_clockwise():\n tim.left(5)\n\n\ndef clear():\n tim.clear()\n tim.penup()\n tim.home()\n tim.pendown()\n\n\nscreen.listen()\nscreen.onkey(move_forwards, \"w\")\nscreen.onkey(move_backwards, \"s\")\nscreen.onkey(anti_clockwise, \"a\")\nscreen.onkey(clockwise, \"d\")\nscreen.onkey(clear, \"c\")\nscreen.exitonclick()\n"
},
{
"alpha_fraction": 0.6178217530250549,
"alphanum_fraction": 0.6495049595832825,
"avg_line_length": 30.5625,
"blob_id": "74841c816f4b505b470c74cdde3d76fbd0c7f073",
"content_id": "1387072ce39db67c1b68382d8c54df2a2816cf06",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1010,
"license_type": "no_license",
"max_line_length": 163,
"num_lines": 32,
"path": "/100DaysOfCoding/amazon_price_tracker/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import requests\nfrom bs4 import BeautifulSoup\nimport smtplib\n\n\nMY_EMAIL = \"Your Email\"\nMY_PASSWORD = \"Your Password\"\nSITE_URL = \"https://www.amazon.com/Instant-Pot-Duo-Evo-Plus/dp/B07W55DDFB/ref=sr_1_1?qid=1597662463\"\n\nheaders = {\n \"Accept-Language\": \"en-US\",\n \"User-Agent\": \"Chrome/92.0.4515.159\"\n}\n\nresponse = requests.get(url=SITE_URL, headers=headers).text\nsoup = BeautifulSoup(response, \"html.parser\")\nprice_with_dollar = soup.find(name=\"span\", id=\"priceblock_ourprice\").string\nprice = price_with_dollar.split(\"$\")[1]\n\n\nif int(float(price)) < 200:\n with smtplib.SMTP(\"smtp.gmail.com\") as connection:\n connection.starttls()\n connection.login(\n user=MY_EMAIL,\n password=MY_PASSWORD\n )\n connection.sendmail(\n from_addr=MY_EMAIL,\n to_addrs=\"Reciever Email\",\n msg=f\"Subject:Amazon price Alert!\\n\\n{soup.find(name='span', id='productTitle').string} is now available in just ${price}.\\n{SITE_URL}\".encode(\"utf-8\")\n )\n"
},
{
"alpha_fraction": 0.6128769516944885,
"alphanum_fraction": 0.6462917923927307,
"avg_line_length": 22.55769157409668,
"blob_id": "218cb738de0e5d32fede42c392dfc1ed956c7cbc",
"content_id": "460b71bae81f4c097c34ca1855339ced854eb636",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1227,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 52,
"path": "/100DaysOfCoding/pong_game/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Screen\nfrom paddle import Paddle\nfrom ball import Ball\nimport time\nfrom scoreboard import Score\n\nscore = Score()\nscreen = Screen()\nscreen.setup(800, 600)\nscreen.bgcolor(\"black\")\nscreen.title(\"Pong\")\nscreen.tracer(0)\n\n\nr_paddle = Paddle((360, 0))\nl_paddle = Paddle((-360, 0))\nball = Ball()\n\nscreen.listen()\nscreen.onkey(r_paddle.right_go_up, \"Up\")\nscreen.onkey(r_paddle.right_go_down, \"Down\")\nscreen.onkey(l_paddle.left_go_up, \"w\")\nscreen.onkey(l_paddle.left_go_down, \"s\")\n\n\ngame_is_on = True\nwhile game_is_on:\n time.sleep(ball.move_speed)\n screen.update()\n ball.move()\n\n# Detect Collision with the Upper and Lower Walls:\n if ball.ycor() > 280 or ball.ycor() < -280:\n ball.y_bounce()\n\n# Detect Collision with the paddle:\n if ball.distance(r_paddle) < 50 and ball.xcor() > 320 or ball.distance(l_paddle) < 50 and ball.xcor() < -320:\n ball.x_bounce()\n\n# Detection of miss:\n if ball.distance(r_paddle) > 50 and ball.xcor() > 390:\n ball.paddle_misses()\n score.l_point()\n score.update()\n\n if ball.distance(l_paddle) > 50 and ball.xcor() < -390:\n ball.paddle_misses()\n score.r_point()\n score.update()\n\n\nscreen.exitonclick()\n\n\n"
},
{
"alpha_fraction": 0.5950155854225159,
"alphanum_fraction": 0.6012461185455322,
"avg_line_length": 27.954545974731445,
"blob_id": "d54814641f2a284d3887ee7078b70b4d7f803a20",
"content_id": "c5e7235e6f054e825f7e08831c70a08f741f44ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 642,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 22,
"path": "/100DaysOfCoding/issoverhead-start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import smtplib\nimport time\nfrom config import iss_position, sunset_sunrise\n\n\nMY_EMAIL = \"[email protected]\"\nMY_PASSWORD = \"abcxyz123789\"\n\n\n# BONUS: run the code every 60 seconds.\nwhile True:\n time.sleep(60)\n if iss_position() and sunset_sunrise():\n # Then send me an email to tell me to look up.\n with smtplib.SMTP(\"smtp.gmail.com\") as connection:\n connection.starttls()\n connection.login(user=MY_EMAIL, password=MY_PASSWORD)\n connection.sendmail(\n from_addr=MY_EMAIL,\n to_addrs=MY_EMAIL,\n msg=\"Subject:ISS Overhead\\n\\nISS is Above you.\"\n )\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.477477490901947,
"alphanum_fraction": 0.5135135054588318,
"avg_line_length": 12.75,
"blob_id": "ed4dbcc2b0df57d5f9a25ad21048971093b072bc",
"content_id": "95f5227c525a9704208b5cf0b053b5db5fab6d72",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 111,
"license_type": "no_license",
"max_line_length": 19,
"num_lines": 8,
"path": "/100DaysOfCoding/Day-27-Tkinter/playground.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "def add(*args):\n result = 0\n for n in args:\n result += n\n return result\n\n\nprint(add(2, 5, 6))\n\n"
},
{
"alpha_fraction": 0.5039034485816956,
"alphanum_fraction": 0.5223562717437744,
"avg_line_length": 29.65217399597168,
"blob_id": "87ca8d7c0fa9f3f324d2cd042bdfeb78c6e66287",
"content_id": "b37b9c1c5b56000e6f37621c66f3b905b3ecff47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1409,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 46,
"path": "/Black-Jack-Capstone/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from art import logo\nimport random\n\ncards = [11,2,3,4,5,6,7,8,9,10,10,10,10]\nuser_cards = []\ncomputer_cards = []\n\nask = input(\"Do you wanna play Black-Jack: type 'y' for Yes and 'n' for no. \")\nif ask == \"y\":\n restart = True\n\n while restart:\n print (logo)\n \n def pick_cards():\n for _ in range(2):\n random_card = random.choice(cards)\n user_cards.append(random_card)\n comp_random_card = random.choice(cards)\n computer_cards.append(comp_random_card)\n\n def cards_dealing():\n \n for card in user_cards:\n user_score += card\n\n for card in computer_cards:\n computer_score += card\n\n if user_score == 21:\n print(\"You've got a BLACK-JACK. You Win.\")\n elif computer_score == 21:\n print(\"Computer got a BLACK-JACK. You Lose.\")\n \n pick_cards()\n cards_dealing()\n user_score = 0\n computer_score = 0\n print(f\" Your cards: {user_cards}, current score: {user_score}\")\n print(f\" Computer's first Cards: {computer_cards[0]}\")\n\n user_decision = input(\"Type 'y' to get another card, type 'n' to pass: \")\n\n play_again = input(\"Do you want to play a game of Balck-Jack? Type 'y' or 'n': \")\n if play_again == 'n':\n restart = False"
},
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.7589285969734192,
"avg_line_length": 39,
"blob_id": "33959078ff285c8a315432d99b45ab5ba5a18d0e",
"content_id": "0c40b84ef3e1d426365f8639b6dd1fd9683e7b23",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 560,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 14,
"path": "/100DaysOfCoding/Day-48/filling_forms.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from selenium import webdriver\nfrom selenium.webdriver.common.keys import Keys\n\nchrome_driver_path = \"/snap/bin/chromium.chromedriver\"\ndriver = webdriver.Chrome(executable_path=chrome_driver_path)\ndriver.get(\"http://secure-retreat-92358.herokuapp.com/\")\nf_name = driver.find_element_by_name(\"fName\")\nl_name = driver.find_element_by_name(\"lName\")\nemail = driver.find_element_by_name(\"email\")\nf_name.send_keys(\"Moazzam\")\nl_name.send_keys(\"Khan\")\nemail.send_keys(\"[email protected]\")\nsign_up_btn = driver.find_element_by_class_name(\"btn\")\nsign_up_btn.send_keys(Keys.ENTER)\n"
},
{
"alpha_fraction": 0.5644333958625793,
"alphanum_fraction": 0.5826302766799927,
"avg_line_length": 34.08720779418945,
"blob_id": "2cf1657d071b6c957bbd9f745081d3b92835fb9e",
"content_id": "30dff73df016a0e0294f1a6150bb11aba1042130",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6047,
"license_type": "no_license",
"max_line_length": 138,
"num_lines": 172,
"path": "/100DaysOfCoding/Coffee-Machine/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "MENU = {\n \"espresso\": {\n \"ingredients\": {\n \"water\": 50,\n \"coffee\": 18,\n },\n \"cost\": 1.5,\n },\n \"latte\": {\n \"ingredients\": {\n \"water\": 200,\n \"milk\": 150,\n \"coffee\": 24,\n },\n \"cost\": 2.5,\n },\n \"cappuccino\": {\n \"ingredients\": {\n \"water\": 250,\n \"milk\": 100,\n \"coffee\": 24,\n },\n \"cost\": 3.0,\n }\n}\n\nresources = {\n \"water\": 300,\n \"milk\": 200,\n \"coffee\": 100,\n}\n\nprofit = 0\n\n\ndef report():\n \"\"\"Generate a report when called.\"\"\"\n for key in resources:\n if key == \"coffee\":\n print(f\"{key}:{resources[key]}g\")\n else:\n print(f\"{key}:{resources[key]}ml\")\n print(f\"Money: ${profit}\")\n\n\ndef coin_total(quarters_in, dimes_in, nickles_in, pennies_in):\n \"\"\"Returns the total of inserted coins.\"\"\"\n return (0.25 * quarters_in) + (0.1 * dimes_in) + (0.05 * nickles_in) + (0.01 * pennies_in)\n\n\ndef check_resources(coffee):\n \"\"\"Returns True if the resources are sufficient in machine and False if resources are not sufficient.\"\"\"\n if coffee == \"espresso\":\n if resources[\"water\"] < 50:\n print(\"Sorry there is not enough water.\")\n return False\n elif resources[\"coffee\"] < 18:\n print(\"Sorry there is not enough Coffee.\")\n return False\n else:\n return True\n elif coffee == \"latte\":\n if resources[\"water\"] < 200:\n print(\"Sorry there is not enough water.\")\n return False\n elif resources[\"milk\"] < 150:\n print(\"Sorry there is not enough milk.\")\n return False\n elif resources[\"coffee\"] < 24:\n print(\"Sorry there is not enough Coffee.\")\n return False\n else:\n return True\n elif coffee == \"cappuccino\":\n if resources[\"water\"] < 250:\n print(\"Sorry there is not enough water.\")\n return False\n elif resources[\"milk\"] < 100:\n print(\"Sorry there is not enough milk.\")\n return False\n elif resources[\"coffee\"] < 24:\n print(\"Sorry there is not enough Coffee.\")\n return False\n else:\n return True\n\n\ndef compare_cost(user_choice, 
monetary_value_in):\n \"\"\"Returns True if the inserted coins are enough to buy coffee and False if the inserted coins are not enough.\"\"\"\n if monetary_value_in < MENU[user_choice][\"cost\"]:\n return False\n else:\n return True\n\n\ndef transaction(user_input):\n \"\"\"Returns the monetary Value. 0 if the resources are not enough.\"\"\"\n # TODO: 4. check the resources sufficient for the asked product.\n # TODO: 4.1. check all three resources i.e. water, milk and coffee.\n # TODO: 4.2. print the message of unavailability of each resource separately.\n continuity = check_resources(user_input)\n if continuity:\n # TODO: 5. Process Coins.\n print(\"Please insert Coins.\")\n # TODO: 5.2. Check inserted coins and store its values. Quarter = $0.25, Dimes = $0.10, nickles = $0.05, pennies = $0.01\n quarters = int(input(\"How many quarters?: \"))\n dimes = int(input(\"How many dimes?: \"))\n nickles = int(input(\"How many nickles?: \"))\n pennies = int(input(\"How many pennies?: \"))\n # TODO: 5.3. Calculate the total value by multiplying the inserted coins with each coin value and adding all of them.\n monetary_value = coin_total(quarters, dimes, nickles, pennies)\n return monetary_value\n elif not continuity:\n return 0\n\n\ndef return_money():\n \"\"\"Returns the amount that returned to the machine user with roundup to two decimal places.\"\"\"\n return_amount = monetary_value_main - MENU[ask_user][\"cost\"]\n return round(return_amount, 2)\n\n\ndef deduct_resources(user_input):\n \"\"\"Deduct the resources from the resources dictionary.\"\"\"\n if user_input == \"espresso\":\n resources[\"water\"] -= MENU[user_input][\"ingredients\"][\"water\"]\n resources[\"coffee\"] -= MENU[user_input][\"ingredients\"][\"coffee\"]\n else:\n resources[\"milk\"] -= MENU[user_input][\"ingredients\"][\"milk\"]\n resources[\"water\"] -= MENU[user_input][\"ingredients\"][\"water\"]\n resources[\"coffee\"] -= MENU[user_input][\"ingredients\"][\"coffee\"]\n\n\n# TODO: 1.2. 
Repeat asking everytime once the drink is dispensed.\ncheck_for_off = False\n\n\nwhile not check_for_off:\n\n # TODO: 1. Ask user for the options available.\n ask_user = input(\"What would you like? (espresso/latte/cappuccino): \")\n # TODO: 2. check for 'off' string if inserted then turn of the machine.\n if ask_user == \"off\":\n\n check_for_off = True\n # TODO: 3. Print report of the resources available in machine.\n elif ask_user == \"report\":\n report()\n # TODO: 1.1. Check the user input to decide what to do next.\n elif ask_user == \"espresso\" or ask_user == \"latte\" or ask_user == \"cappuccino\":\n\n monetary_value_main = transaction(ask_user)\n if monetary_value_main != 0:\n\n\n # TODO: 6. Check for the transaction.\n # TODO: 6.1. Compare the total value of the inserted coins with the coffee value and make a decision.\n decision = compare_cost(ask_user, monetary_value_main)\n\n # TODO: 6.2. if user inserted enough money then add the cost of drink must be added in machine as a profit.\n if decision:\n\n profit += MENU[ask_user][\"cost\"]\n # TODO: 7.1. If transaction was successful and resources are sufficient then deduct the resources available before making.\n deduct_resources(ask_user)\n else:\n print(\"Sorry that's not enough Money. Money refunded\")\n\n # TODO: 6.3. If user has inserted extra money then it should be calculated and returned.\n print(f\"Here is ${return_money()} dollars in change.\")\n # TODO: 7. Make Coffee.\n print(f\"Here is your {ask_user} ☕. Enjoy!\")\n\n\n\n\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.5441666841506958,
"alphanum_fraction": 0.5541666746139526,
"avg_line_length": 26.272727966308594,
"blob_id": "cc6ec37ae5ee900175e0a14ac765178f864dbc8c",
"content_id": "77d0ffad2057126998d4d43a46080aad094d10ff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1200,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 44,
"path": "/100DaysOfCoding/snake_game/scoreboard.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Turtle\nALIGNMENT = \"center\"\nFONT = (\"Arial\", 20, \"normal\")\nCOLOR = \"white\"\nPOSITION = (0, 270)\n\n\nclass Score(Turtle):\n def __init__(self):\n super().__init__()\n self.clear()\n self.score = 0\n self.high_score = 0\n self.update_high_score()\n self.penup()\n self.hideturtle()\n self.color(COLOR)\n self.goto(POSITION)\n self.update_score()\n\n def update_high_score(self):\n with open(\"data.txt\") as file:\n self.high_score = int(file.read())\n\n def update_score(self):\n self.clear()\n self.write(f\"Score : {self.score} High Score : {self.high_score}\", move=False, align=ALIGNMENT, font=FONT)\n\n def reset(self):\n if self.score > self.high_score:\n self.high_score = self.score\n with open(\"data.txt\", mode=\"w\") as file:\n file.write(f\"{self.score}\")\n self.score = 0\n self.update_score()\n self.update_high_score()\n\n # def game_over(self):\n # self.goto(0, 0)\n # self.write(f\"GAME OVER.\", move=False, align=ALIGNMENT, font=FONT)\n\n def increment(self):\n self.score += 1\n self.update_score()\n"
},
{
"alpha_fraction": 0.7405660152435303,
"alphanum_fraction": 0.7594339847564697,
"avg_line_length": 27.200000762939453,
"blob_id": "9bd4e4cd716dd0e8f84ccc46432dffb3623b0197",
"content_id": "817461bb9fcf01dc8cc67e3080ee540d9009d749",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 424,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 15,
"path": "/100DaysOfCoding/Day-51-Twitter-Complaint-Bot/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from internet_speed_bot import InternetSpeedTwitterBot\nimport time\n\nPROMISED_DOWN = 150.0\nPROMISED_UP = 10.0\nCHROME_PATH_URL = \"/snap/bin/chromium.chromedriver\"\nTWITTER_EMAIL = \"Your Twitter Email\"\nTWITTER_PASSWORD = \"Your Twitter Password\"\n\nbot = InternetSpeedTwitterBot(CHROME_PATH_URL)\n\nbot.get_internet_speed()\ntime.sleep(1)\nif bot.up < PROMISED_UP or bot.down < PROMISED_DOWN:\n bot.tweet_at_provider(TWITTER_EMAIL, TWITTER_PASSWORD)\n\n"
},
{
"alpha_fraction": 0.6287816762924194,
"alphanum_fraction": 0.6369583010673523,
"avg_line_length": 27.44186019897461,
"blob_id": "f8a80970c0298ec23dfe0c58a669f5b747bff3b4",
"content_id": "9965e4e1f3b1e80cea2587c4d57097f6e69cee74",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1223,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 43,
"path": "/100DaysOfCoding/us-states-game-start/us-states-game-start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import turtle\nimport pandas\n\nscreen = turtle.Screen()\nscreen.title(\"U.S States Game\")\nimage = \"blank_states_img.gif\"\nscreen.addshape(image)\n\n\nturtle.shape(image)\nname_turtle = turtle.Turtle()\nname_turtle.hideturtle()\nname_turtle.penup()\nstates_data = pandas.read_csv(\"50_states.csv\")\nstates_in_list = states_data[\"state\"].to_list()\n\n\ncount = 0\ngame_continue = True\nguessed_states = []\n\nwhile game_continue:\n user_guess = screen.textinput(title=f\"{count}/50 Guess the States\", prompt=\"What's your guess?\").title()\n count += 1\n s_no = -1\n if user_guess == \"Exit\":\n states_to_learn = [state for state in states_in_list if state not in guessed_states]\n break\n for state in states_in_list:\n s_no += 1\n if user_guess == state:\n guessed_states.append(state)\n state_row = states_data[states_data.state == state]\n state_row_dict = state_row.to_dict()\n # print(state_row_dict[\"x\"])\n name_turtle.goto(int(state_row.x), int(state_row.y))\n name_turtle.write(state, align=\"center\")\n if count == 50:\n game_continue = False\n\n\ndataframe = pandas.DataFrame(states_to_learn)\ndataframe.to_csv(\"states_to_learn.csv\")\n"
},
{
"alpha_fraction": 0.526822566986084,
"alphanum_fraction": 0.5405777096748352,
"avg_line_length": 22.45161247253418,
"blob_id": "f29a3b07c88bd13770f39cac6ae1c1849ceb4e4d",
"content_id": "646b8db27bde9985219c19a7b2c03ea705238eb7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 727,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 31,
"path": "/100DaysOfCoding/pong_game/paddle.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Turtle\nLOCATION = ()\n\n\nclass Paddle(Turtle):\n def __init__(self, location):\n super().__init__()\n self.create_paddle(location)\n\n def create_paddle(self, position):\n self.color(\"white\")\n self.shape(\"square\")\n self.shapesize(stretch_wid=5, stretch_len=1)\n self.penup()\n self.goto(position)\n\n def right_go_up(self):\n y = self.ycor() + 20\n self.goto(self.xcor(), y)\n\n def right_go_down(self):\n y = self.ycor() - 20\n self.goto(self.xcor(), y)\n\n def left_go_up(self):\n y = self.ycor() + 20\n self.goto(self.xcor(), y)\n\n def left_go_down(self):\n y = self.ycor() - 20\n self.goto(self.xcor(), y)\n"
},
{
"alpha_fraction": 0.6178229451179504,
"alphanum_fraction": 0.6244019269943237,
"avg_line_length": 26.409835815429688,
"blob_id": "679bcf012f4e5f4ca8238e5ce1779eb2e21742ab",
"content_id": "3947c89971b026b7b8d725f614cf82bf4d931ff5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1672,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 61,
"path": "/100DaysOfCoding/Birthday+Wisher+(Day+32)+start/Birthday Wisher (Day 32) start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# import smtplib\n#\n# my_yahoo_email = \"[email protected]\"\n# my_yahoo_password = \"spuyicudqgavusft\"\n# my_gmail_email = \"[email protected]\"\n# my_gmail_password = \"abcxyz123789\"\n#\n# with smtplib.SMTP(\"smtp.mail.yahoo.com\") as connection:\n# connection.starttls()\n# connection.login(user=my_yahoo_email, password=my_yahoo_password)\n# connection.sendmail(from_addr=my_yahoo_email,\n# to_addrs=my_gmail_email,\n# msg=\"Subject:Sending from Python yahoo\\n\\nHello,\\nTea pilo.\")\n#\n# with smtplib.SMTP(\"smtp.gmail.com\") as connection:\n# connection.starttls()\n# connection.login(user=my_gmail_email,\n# password=my_gmail_password)\n# connection.sendmail(from_addr=my_gmail_email,\n# to_addrs=my_yahoo_email,\n# msg=\"Subject:Sending from Python Gmail\\n\\nHello,\\nTea Pilo.\")\n#\n\n\n# import datetime as dt\n#\n# now = dt.datetime.now()\n# year = now.year\n# micro = now.microsecond\n# print(micro)\n#\n# date_of_birth = dt.datetime(year=1990, month=8, day=3, hour=23, minute=29)\n# print(date_of_birth)\n\nimport smtplib\nimport datetime as dt\nimport random\n\n\nquotes = []\nmy_email = \"[email protected]\"\nmy_password = \"abcxyz123789\"\n\n\nnow = dt.datetime.now()\nweekday = now.weekday()\n\n\nwith open(\"quotes.txt\") as data:\n for quote in data:\n quotes.append(quote)\n\n\nrandom_quote = random.choice(quotes)\n\nif weekday == 0:\n with smtplib.SMTP(\"smtp.gmail.com\") as connection:\n connection.starttls()\n connection.login(user=my_email, password=my_password)\n connection.sendmail(from_addr=my_email, to_addrs=\"[email protected]\",\n msg=f\"Subject:Motivational Quotes\\n\\n{random_quote}\")\n"
},
{
"alpha_fraction": 0.6950998306274414,
"alphanum_fraction": 0.7011494040489197,
"avg_line_length": 34.17021179199219,
"blob_id": "4e25a9dd3581ac7749d633ef4181a0aee1d60ceb",
"content_id": "3b2ae03f97ed47674f12365d7426d05ab06671e7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1653,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 47,
"path": "/100DaysOfCoding/Day46-Spotify_Playlist/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from pprint import pprint\nfrom bs4 import BeautifulSoup\nimport requests\nimport spotipy\nfrom spotipy.oauth2 import SpotifyOAuth\n\n\nSPOTIFY_CLIENT_ID = \"Your Spotify Client ID\"\nSPOTIFY_CLIENT_SECRET = \"Your client Secret\"\nuser_input = input(\"Which year do you want to travel to? Type the date in this format YYYY-MM-DD?: \")\nyear_list = user_input.split(\"-\")\n# print(year_list)\nresponse = requests.get(f\"https://www.billboard.com/charts/hot-100/{user_input}\").text\n\nsoup = BeautifulSoup(response, \"html.parser\")\n# print(soup.prettify())\nsongs_span_list = soup.find_all(name=\"span\", class_=\"chart-element__information__song\")\n# print(songs_span_list)\nsongs_title_list = [song_title.getText() for song_title in soup.find_all(name=\"span\", class_=\"chart-element__information__song\")]\n# print(songs_title_list)\nsp = spotipy.Spotify(auth_manager=SpotifyOAuth(\n client_id=SPOTIFY_CLIENT_ID,\n client_secret=SPOTIFY_CLIENT_SECRET,\n redirect_uri=\"http://example.com\",\n scope=\"playlist-modify-private\",\n show_dialog=True,\n cache_path=\"token.txt\"\n))\n\nuser_id = sp.current_user()[\"id\"]\n\nsongs_uri = []\nfor song in songs_title_list:\n result = sp.search(q=f\"track:{song} year:{year_list[0]}\", type=\"track\")\n # pprint(result)\n try:\n uri = result[\"tracks\"][\"items\"][0][\"uri\"]\n songs_uri.append(uri)\n except IndexError:\n pass\n # print(f\"{songs_title_list} doesn't exist in Spotify. Skipped.\")\n # print(songs_uri)\n\n\nplaylist = sp.user_playlist_create(user=sp.current_user()[\"id\"], name=f\"{user_input} Billboard 100\", public=False)\npprint(playlist)\nsp.playlist_add_items(playlist_id=playlist[\"id\"], items=songs_uri)\n"
},
{
"alpha_fraction": 0.582608699798584,
"alphanum_fraction": 0.6245059370994568,
"avg_line_length": 23.326923370361328,
"blob_id": "ae2724ebed7e15bf0309bd44f4ba602720f9926a",
"content_id": "4ed5fa6d1389b4a9cb86856479ebf97a04f71640",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1265,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 52,
"path": "/100DaysOfCoding/turtle-crossing-start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import time\nfrom turtle import Screen\nfrom player import Player\nfrom car_manager import CarManager\nfrom scoreboard import Scoreboard\n\nSINGLE_PLAYER = (0, -280)\nPLAYER_1 = (150, -280)\nPLAYER_2 = (-150, -280)\nscreen = Screen()\nscreen.setup(width=600, height=600)\nscreen.tracer(0)\n\n\nturtle1 = Player(PLAYER_1)\nturtle2 = Player(PLAYER_2)\nscreen.listen()\nscreen.onkey(turtle1.move, \"Up\")\nscreen.onkey(turtle2.move, \"w\")\ncar = CarManager()\nscore = Scoreboard()\n\ngame_over = False\ngame_is_on = True\nwhile game_is_on:\n time.sleep(0.1)\n car.move_car()\n car.init_car()\n # Detect if player cross successfully:\n if turtle1.ycor() > 280:\n turtle1.init_turtle(PLAYER_1)\n score.update_scoreboard()\n car.speed_increment()\n # Detect the collision:\n for c in car.car_list:\n if turtle1.distance(c) < 20:\n game_over = True\n while game_over:\n score.game_over(1)\n\n if turtle2.ycor() > 280:\n turtle2.init_turtle(PLAYER_2)\n score.update_scoreboard()\n car.speed_increment()\n # Detect the collision:\n for c in car.car_list:\n if turtle2.distance(c) < 20:\n game_over = True\n while game_over:\n score.game_over(2)\n\n screen.update()\n"
},
{
"alpha_fraction": 0.7549019455909729,
"alphanum_fraction": 0.758169949054718,
"avg_line_length": 37.25,
"blob_id": "af7ce20dc188d89ddbc035e701652b8404ce7a2f",
"content_id": "a3f7d536ee335a3ee6566db18659b3ee685f4901",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 306,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 8,
"path": "/100DaysOfCoding/Day-48/interaction.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from selenium import webdriver\n\nchrome_driver_path = \"/snap/bin/chromium.chromedriver\"\ndriver = webdriver.Chrome(executable_path=chrome_driver_path)\ndriver.get(\"https://en.wikipedia.org/wiki/Main_Page\")\nnumber = driver.find_element_by_xpath('//*[@id=\"articlecount\"]/a[1]')\nprint(number.text)\ndriver.quit()\n"
},
{
"alpha_fraction": 0.6326737403869629,
"alphanum_fraction": 0.6397767663002014,
"avg_line_length": 26.760562896728516,
"blob_id": "62a7176e39d7d34f79eb082a5432a9c5050575ce",
"content_id": "92dbad7370477cf4537c3203803c720674205b31",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1971,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 71,
"path": "/100DaysOfCoding/Working_On_CSV_files/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# import csv\n#\n#\n# # with open(\"weather_data.csv\") as weather_data:\n# # data = weather_data.readlines()\n# # for stripped_data in data:\n# # stripped_data.strip()\n# # print(stripped_data)\n# # print(data)\n#\n# # Data Extraction using csv:\n# # with open(\"weather_data.csv\") as weather_data:\n# # # .reader returns the object with each row in a list format. We can loop through it.\n# # data = csv.reader(weather_data)\n# # temperature = []\n# # loop_count = 0\n# # for row in data:\n# # loop_count += 1\n# # if loop_count > 1:\n# # temperature.append(int(row[1]))\n# # # print(row)\n# # print(temperature)\n#\n#\n# # Data Extraction Using Pandas:\n# import pandas\n# data = pandas.read_csv(\"weather_data.csv\")\n# # print(data[\"condition\"])\n#\n# # Converting dataframe into dictionary format:\n# data_in_dict_format = data.to_dict()\n# print(data_in_dict_format)\n#\n# # Converting series or column in list format:\n# data_in_list_format = data[\"temp\"].to_list()\n# print(data_in_list_format)\n#\n# # Calculating average temperature of the week without using pandas:\n# total_days = len(data_in_list_format)\n# total_temp = sum(data_in_list_format)\n#\n# average_temp = total_temp/total_days\n# print(average_temp)\n#\n# # Calculating average temperature using pandas:\n# print(data[\"temp\"].mean())\n#\n# # getting maximum temperature from file using Pandas:\n# print(data[\"temp\"].max())\n#\n# # getting data from row in pandas:\n# print(data[data.temp == data.temp.max()])\n#\n# # getting row of Monday:\n# monday = data[data.day == \"Monday\"]\n# in_fahrenheit = ((int(monday.temp) + 9)/5) + 32\n# print(in_fahrenheit)\n\nimport pandas\n\n# creating csv from pandas:\ndata_dict = {\n \"students\" : [\"Moazzam\", \"Adil\", \"Khan\"],\n \"scores\": [34, 24, 46],\n}\n\ndata = pandas.DataFrame(data_dict)\ndata.to_csv(\"testing_fie.csv\")\n\nstudents_data = pandas.read_csv(\"testing_fie.csv\")\nprint(students_data[students_data[\"students\"] == \"Moazzam\"])\n"
},
{
"alpha_fraction": 0.6079664826393127,
"alphanum_fraction": 0.6100628972053528,
"avg_line_length": 33.14285659790039,
"blob_id": "ec31c3d24b5033044ad2d5aa6e87f552ce9f39b0",
"content_id": "47cb3f6ae0a11b2866b5130a3058e115e49e50ac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 477,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 14,
"path": "/100DaysOfCoding/Caeser Cypher/caeser_cypher_functions.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from caeser_cypher_art import alphabet\n\ndef caeser(text_input, shifting, direction_check):\n end_result = \"\"\n if direction_check == \"decode\":\n shifting *= -1\n for letter in text_input:\n if letter in alphabet:\n position = alphabet.index(letter)\n new_position = position + shifting\n end_result += alphabet[new_position]\n else:\n end_result += letter\n print(f\"The {direction_check}d text is {end_result}\")"
},
{
"alpha_fraction": 0.6323730945587158,
"alphanum_fraction": 0.6844993233680725,
"avg_line_length": 18.1842098236084,
"blob_id": "56fe827e3266d6f6095fe78a33d02983362dd17b",
"content_id": "6577e333eed99c61d1be14f07ee14653ef1edcee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 729,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 38,
"path": "/100DaysOfCoding/Miles_to_KM_converter/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from tkinter import *\n\n\ndef calculation():\n miles = float(user_input.get())\n km = miles * 1.609\n label_4.config(text=km)\n\n\nwindow = Tk()\nwindow.title(\"Miles to Kilo Meter Converter\")\nwindow.minsize(width=100, height=100)\n\n# User Input:\nuser_input = Entry(width=10)\nuser_input.grid(column=1, row=0)\n\n# button:\nbutton = Button(text=\"Calculate\", activebackground=\"blue\", command=calculation)\nbutton.grid(column=1, row=2)\n\n# label 1\nlabel_1 = Label(text=\"is equal to\")\nlabel_1.grid(column=0, row=1)\n\n# label 2\nlabel_2 = Label(text=\"Miles\")\nlabel_2.grid(column=2, row=0)\n\n# label 3\nlabel_3 = Label(text=\"KM\")\nlabel_3.grid(column=2, row=1)\n\n# label 4\nlabel_4 = Label(text=\"0\")\nlabel_4.grid(column=1, row=1)\n\nwindow.mainloop()\n"
},
{
"alpha_fraction": 0.5961244106292725,
"alphanum_fraction": 0.601223886013031,
"avg_line_length": 31.147541046142578,
"blob_id": "d402f5c44f2450b01b061744539f3c5d653c438b",
"content_id": "34dbb4450932a68c1c46e1cc05461bd734583cb8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1961,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 61,
"path": "/100DaysOfCoding/cookie_bot/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from selenium import webdriver\nimport time\n\n# Setup of Selenium:\nchrome_driver_url = \"/snap/bin/chromium.chromedriver\"\ndriver = webdriver.Chrome(chrome_driver_url)\ndriver.get(\"http://orteil.dashnet.org/experiments/cookie/\")\n\n# finding cookie:\ncookie_element = driver.find_element_by_id(\"cookie\")\n# cookie_element.click()\n\n# getting item id's:\nitems = driver.find_elements_by_css_selector(\"#store div\")\nitems_ids = [item.get_attribute(\"id\") for item in items]\nitems_ids.pop()\nitems_ids.reverse()\nprint(items_ids)\n\n# Wait 1 secs and remove cookie popup:\ntime.sleep(1)\ncc_banner = driver.find_element_by_css_selector(\"div.cc_container a\")\ncc_banner.click()\n\ngame_on = True\ncheck = time.time() + 5\ngame_end = time.time() + (60*5)\n\nwhile game_on:\n cookie_element.click()\n if time.time() > check:\n # getting prices of items:\n items_price_elements = driver.find_elements_by_css_selector(\"#store b\")\n items_price_elements.pop()\n items_price_elements.reverse()\n items_prices = [int(element_text.text.split(\"-\")[1].strip().replace(\",\", \"\")) for element_text in items_price_elements]\n # print(items_prices)\n try:\n # finding money we have:\n money = str(driver.find_element_by_id(\"money\").text)\n if \",\" in money:\n money.replace(\",\", \"\")\n print(money)\n cookie_count = int(money)\n print(cookie_count)\n except ValueError:\n pass\n try:\n for n in range(len(items_prices)-1):\n if cookie_count > items_prices[n]:\n buy = driver.find_element_by_id(items_ids[n])\n # print(buy)\n buy.click()\n except Exception:\n pass\n check += 5\n\n if time.time() > game_end:\n cookies_per_second = driver.find_element_by_id(\"cps\")\n print(f\"Cookies/Second: {cookies_per_second.text.split()[2]}\")\n game_on = False\n"
},
{
"alpha_fraction": 0.6720098853111267,
"alphanum_fraction": 0.6942046880722046,
"avg_line_length": 20.91891860961914,
"blob_id": "d02c6bdc7c1a22d8ba4d4091c758f630280fbd15",
"content_id": "1bbb8ca5aae9e93fb945647e56fe44a24b1cef1d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 811,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 37,
"path": "/100DaysOfCoding/Day-27-Tkinter/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# import tkinter\nfrom tkinter import *\n\n\ndef button_clicked():\n enter = user_input.get()\n my_label.config(text=enter)\n\n\ndef second_button():\n status = new_button.get()\n print(status)\n\n\nwindow = Tk()\nwindow.title(\"My first GUI Program\")\nwindow.minsize(width=600, height=450)\n\n# Label\nmy_label = Label(text=\"I am a Label\", font=(\"Arial\", 20, \"italic\"))\nmy_label.grid(column=0, row=0)\n# my_label.config(text=\"New Text\")\nmy_label[\"text\"] = \"New Text\"\n\n# Button:\nbutton = Button(text=\"Click Me\", command=button_clicked, background=\"red\", activebackground=\"blue\")\nbutton.grid(column=1, row=1)\n\n# Second Button:\nnew_button = Button(text=\"Click Me Please\", activebackground=\"yellow\")\nnew_button.grid(column=2, row=0)\n\n# Input\nuser_input = Entry(width=10)\nuser_input.grid(column=3, row=3)\n\nwindow.mainloop()\n"
},
{
"alpha_fraction": 0.6919889450073242,
"alphanum_fraction": 0.6984346508979797,
"avg_line_length": 29.591548919677734,
"blob_id": "626f3a1a278cb4504b15d8820f0e499cc393e700",
"content_id": "da206debf1a1a9473c37993c3a76adae2a49d390",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2172,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 71,
"path": "/100DaysOfCoding/bs4-start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from bs4 import BeautifulSoup\nimport requests\n\nresponse = requests.get(\"https://news.ycombinator.com/\")\n\nsoup = BeautifulSoup(response.text, \"html.parser\")\narticles = soup.find_all(name=\"a\", class_=\"storylink\")\ntexts = [article_tag.getText() for article_tag in articles]\nlinks = [article_tag.get(\"href\") for article_tag in articles]\nupvotes = [int(upvotes.string.split()[0]) for upvotes in soup.find_all(name=\"span\", class_=\"score\")]\n\nhighest_votes = max(upvotes)\nhighest_index = upvotes.index(highest_votes)\nprint(highest_votes)\n# print(highest_index)\narticle_name = texts[highest_index]\narticle_link = links[highest_index]\nprint(article_name)\nprint(article_link)\n# print(texts)\n# print(links)\n# print(upvotes)\n\n\nlist_of_span_tags = soup.find_all(name=\"span\", class_=\"score\")\n# print(list_of_span_tags)\npoints_list = []\nfor value in list_of_span_tags:\n # print(value.string)\n points = value.string\n points_list.append(points)\n\n# with open(\"website.html\") as file:\n# data = file.read()\n# # Creating a soup from data from file.\n# soup = BeautifulSoup(data, \"html.parser\")\n# # printing title tag.\n# print(soup.title)\n# # printing name of title tag.\n# print(soup.title.name)\n# # print data inside title tag.\n# print(soup.title.string)\n# # formatting soup.\n# print(soup.prettify())\n# # printing all anchor tags.\n# print(soup.a)\n# # printing all lists.\n# print(soup.li)\n# # returning all anchor tags in a list\n# all_anchors = soup.find_all(name=\"a\")\n# # looping through anchor tags lists.\n# for tag in all_anchors:\n# # printing content inside anchor tag.\n# print(tag.getText())\n# # printing link inside anchor tag.\n# print(tag.get(\"href\"))\n# # returning h1 tan with id name.\n# heading1 = soup.find(name=\"h1\", id=\"name\")\n# print(heading1)\n# # returning h3 tag with class heading,\n# heading3 = soup.find(name=\"h3\", class_=\"heading\")\n# print(heading3)\n# # returning all tags with id name.\n# h1 = 
soup.select_one(selector=\"#name\")\n# print(h1)\n# # returning all tags with class heading.\n# h3 = soup.select(selector=\".heading\")\n# print(h3)\n# # returning all anchor tags inside p (paragraph)\n# a_inside_p = soup.select(selector=\"p a\")\n# print(a_inside_p)\n"
},
{
"alpha_fraction": 0.5868725776672363,
"alphanum_fraction": 0.5945945978164673,
"avg_line_length": 27.66666603088379,
"blob_id": "caf9240eca4b5b796174bb883ece1d9526de32ec",
"content_id": "35c6c4bb7232480636bff0046c5328b01bdcb095",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 259,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 9,
"path": "/100DaysOfCoding/Linux DemoPath/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# Absolute File Path:\nwith open(\"/home/moazzam/Desktop/NCTP/name.txt\", mode=\"r+\") as file:\n test = file.read()\n print(test)\n\n# Relative File Path:\nwith open(\"./../../../../../NCTP/name.txt\", mode=\"r+\") as file:\n test1 = file.read()\n print(test1)\n\n"
},
{
"alpha_fraction": 0.6054006814956665,
"alphanum_fraction": 0.6332752704620361,
"avg_line_length": 27.700000762939453,
"blob_id": "ab506abe54cd8af4d16c55fb0de28b9f079cad69",
"content_id": "8a06bba155201e5819c1de689d63ff8b6e1475b8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1148,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 40,
"path": "/100DaysOfCoding/turtle-race/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Turtle, Screen\nimport random\n\n\nis_race_on = False\nscreen = Screen()\nscreen.setup(width=600, height=500)\nuser_input = screen.textinput(title=\"Make your bet\", prompt=\"Select your turtle by entering color: \").lower()\ncolor = [\"red\", \"green\", \"blue\", \"yellow\", \"orange\", \"purple\"]\ny_position = [-200, -120, -40, 40, 120, 200]\nall_turtles = []\n\n\nfor turtle_index in range(6):\n new_turtle = Turtle(shape=\"turtle\")\n new_turtle.color(color[turtle_index])\n new_turtle.penup()\n new_turtle.goto(x=-280, y=y_position[turtle_index])\n all_turtles.append(new_turtle)\n\n\nif user_input:\n is_race_on = True\n\n\nwhile is_race_on:\n for turtle in all_turtles:\n if turtle.xcor() > 280:\n is_race_on = False\n winning_turtle_color = turtle.pencolor()\n if winning_turtle_color == user_input:\n print(f\"You've Won! The {winning_turtle_color} turtle has won the race.\")\n else:\n print(f\"You've lost! The {winning_turtle_color} turtle has won the race.\")\n\n random_speed = random.randint(0, 10)\n turtle.forward(random_speed)\n\n\nscreen.exitonclick()\n"
},
{
"alpha_fraction": 0.6094674468040466,
"alphanum_fraction": 0.6306001543998718,
"avg_line_length": 22.176469802856445,
"blob_id": "d8b0535e1eef2540857f52c4b11cae697ada8d68",
"content_id": "cab598494e7de5f85934559bcdd29373213d3d2c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1183,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 51,
"path": "/100DaysOfCoding/rain_alert/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import requests\nimport os\nfrom twilio.rest import Client\n\nmy_key = os.environ.get(\"OWM_KEY\")\nparams = {\n \"lat\": 25.014373,\n \"lon\": 67.126956,\n \"appid\": my_key,\n \"exclude\": \"current,minutely,daily,alerts\",\n}\naccount_sid = \"AC0873bc0c362d04c3c5d2c22cc217f5c4\"\nauth_token = os.environ.get(\"TWILIO_AUTH_KEY\")\n\nresponse = requests.get(\n url=\"https://api.openweathermap.org/data/2.5/onecall\",\n params=params,\n)\n\nresponse.raise_for_status()\nprint(response)\n\ndata = response.json()\nhourly_data = data[\"hourly\"][0:12]\nhourly_data_id = [x[\"weather\"][0][\"id\"] for x in hourly_data]\nprint(hourly_data_id)\n\nwill_rain = False\n\nfor value in hourly_data_id:\n if value < 600:\n will_rain = True\n\nif will_rain:\n client = Client(account_sid, auth_token)\n message = client.messages \\\n .create(\n body=\"It's going to rain today. Wear Casual cloths for Office.\",\n from_=\"Twillio Number\",\n to=\"Your Number\"\n )\n print(message.status)\nelse:\n client = Client(account_sid, auth_token)\n message = client.messages \\\n .create(\n body=\"It's not going to rain today.\",\n from_=\"Twillio Number\",\n to=\"Your Number\"\n )\n print(message.status)\n\n"
},
{
"alpha_fraction": 0.6457765698432922,
"alphanum_fraction": 0.6743869185447693,
"avg_line_length": 23.46666717529297,
"blob_id": "0283b8ac682cc9d5ef9db1651579d31c5608d7af",
"content_id": "ac25b404ff8424e0e0657b92d5605dbc48b2a65b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 734,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 30,
"path": "/100DaysOfCoding/Day-33/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import requests\nfrom datetime import *\n\n# response = requests.get(\"http://api.open-notify.org/iss-now.json\")\n#\n# data = response.json()\n#\n# latitude = data[\"iss_position\"][\"latitude\"]\n# longitude = data[\"iss_position\"][\"longitude\"]\n#\n# iss_position = (latitude, longitude)\n#\n# print(iss_position)\n\nparameters = {\n \"lat\": 25.016551,\n \"lng\": 67.122834,\n \"formatted\": 0,\n}\n\nresponse = requests.get(\"https://api.sunrise-sunset.org/json\", params=parameters)\nresponse.raise_for_status()\ndata = response.json()\nsunrise = data[\"results\"][\"sunrise\"].split(\"T\")[1].split(\":\")[0]\nsunset = data[\"results\"][\"sunset\"].split(\"T\")[1].split(\":\")[0]\nprint(sunrise)\nprint(sunset)\n\ncurrent_datetime = datetime.now()\nprint(current_datetime.hour)\n"
},
{
"alpha_fraction": 0.658450722694397,
"alphanum_fraction": 0.6619718074798584,
"avg_line_length": 27.399999618530273,
"blob_id": "2faab506e658db37158b9baa5ec13447b8770af0",
"content_id": "7a94de1d9dffee390c4b04af71c3e75349918e7e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 568,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 20,
"path": "/100DaysOfCoding/Caeser Cypher/caeser_cypher.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "\nfrom caeser_cypher_art import logo\nprint(logo)\n\nfrom caeser_cypher_functions import caeser\nask_for_retry = True\n\nwhile ask_for_retry:\n\n direction = input(\"Type 'encode' to encrypt, type 'decode' to decrypt:\\n\").lower()\n text = input(\"Type your message:\\n\").lower()\n shift = int(input(\"Type the shift number:\\n\"))\n\n shift = shift % 26\n\n caeser(text_input=text,shifting=shift,direction_check=direction)\n\n asking = input(\"Type 'yes' to start again and 'no' to end.\").lower()\n if asking == \"no\":\n ask_for_retry = False\n print(\"Goodbye\")"
},
{
"alpha_fraction": 0.6182572841644287,
"alphanum_fraction": 0.6192945837974548,
"avg_line_length": 22.975000381469727,
"blob_id": "09cfb2941820388ca3137c3d5ddef699a70e5268",
"content_id": "cbb6135c73b29255c589c95a456d6f74f0fa19f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 964,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 40,
"path": "/100DaysOfCoding/Day-30/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# # Try these line of code:\n# try:\n# file = open(\"a_file.txt\")\n# dictionary = {\"key\": \"value\"}\n# print(dictionary[\"keey\"])\n# # Catching the exceptions that if error occurs above then execute this line of code:\n# except FileNotFoundError:\n# file = open(\"a_file.txt\", \"w\")\n# file.write(\"Something\")\n# except KeyError as error_message:\n# print(f\"The key {error_message} doesnt exist\")\n#\n# except IndexError:\n#\n# # Execute this line of code if there was no error:\n# else:\n# content = file.read()\n# print(content)\n# # Execute this no matter what:\n# finally:\n# # file.close()\n# # print(\"File closed\")\n# raise TypeError(\"This is self made.\")\n#\n\n\nfruits = [\"Apple\", \"Pear\", \"Orange\"]\n\n#TODO: Catch the exception and make sure the code runs without crashing.\n\ndef make_pie(index):\n try:\n fruit = fruits[index]\n except IndexError:\n print(\"Fruit pie\")\n else:\n print(fruit + \" pie\")\n\n\nmake_pie(4)\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.5908024311065674,
"alphanum_fraction": 0.612388551235199,
"avg_line_length": 33.33871078491211,
"blob_id": "23b88a3acf31f92ac4ff853923571ca291e15802",
"content_id": "c18d34f060b79d927aaf67fdb45d3137cf38a41d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2131,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 62,
"path": "/100DaysOfCoding/quizzler-app-start/ui.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from tkinter import *\nfrom quiz_brain import QuizBrain\n\nTHEME_COLOR = \"#375362\"\n\n\nclass QuizInterface:\n\n def __init__(self, quiz: QuizBrain):\n self.quiz = quiz\n self.window = Tk()\n self.window.title(\"Quiz GUI\")\n self.window.config(padx=20, pady=20, bg=THEME_COLOR)\n\n self.score_label = Label(text=\"Score: 0\", fg=\"white\", highlightthickness=0, bg=THEME_COLOR)\n self.score_label.grid(column=1, row=0)\n\n self.Canvas = Canvas(height=250, width=300)\n self.question_text = self.Canvas.create_text(\n 150,\n 125,\n width=280,\n text=\"Chai Pi lo\",\n font=[\"Arial\", 20, \"italic\"]\n )\n self.Canvas.grid(column=0, row=1, columnspan=2, pady=50)\n\n right_button = PhotoImage(file=\"images/true.png\")\n self.right_button = Button(image=right_button, highlightthickness=0, command=self.right_answer)\n self.right_button.grid(row=2, column=1)\n\n wrong_button = PhotoImage(file=\"images/false.png\")\n self.wrong_button = Button(image=wrong_button, highlightthickness=0, command=self.wrong_answer)\n self.wrong_button.grid(row=2, column=0)\n\n self.get_next_question()\n\n self.window.mainloop()\n\n def get_next_question(self):\n self.Canvas.config(bg=\"White\")\n if self.quiz.still_has_questions():\n self.score_label.config(text=f\"Score: {self.quiz.score}\")\n q_text = self.quiz.next_question()\n self.Canvas.itemconfig(self.question_text, text=q_text)\n else:\n self.Canvas.itemconfig(self.question_text, text=\"You've completed the quiz.\")\n self.right_button.config(state=\"disabled\")\n self.wrong_button.config(state=\"disabled\")\n\n def right_answer(self):\n self.feedback(self.quiz.check_answer(\"True\"))\n\n def wrong_answer(self):\n self.feedback(self.quiz.check_answer(\"False\"))\n\n def feedback(self, answer):\n if answer:\n self.Canvas.config(bg=\"Green\")\n else:\n self.Canvas.config(bg=\"Red\")\n self.window.after(1000, self.get_next_question)\n\n\n"
},
{
"alpha_fraction": 0.6265822649002075,
"alphanum_fraction": 0.642405092716217,
"avg_line_length": 22.407407760620117,
"blob_id": "305b9b20f0eeda04d75e105468d89795d8c98b29",
"content_id": "b354b6530137604c0a6f790858a139d798ff8dec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 632,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 27,
"path": "/100DaysOfCoding/squirrel_data/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import pandas\n\ndata = pandas.read_csv(\"2018_Central_Park_Squirrel_Census_-_Squirrel_Data.csv\")\n# print(data[\"Primary Fur Color\"])\ngray_count = 0\nblack_count = 0\ncinnamon_count = 0\ncolor_list = data[\"Primary Fur Color\"].to_list()\n\n\nfor color in color_list:\n if color == \"Gray\":\n gray_count += 1\n elif color == \"Black\":\n black_count += 1\n elif color == \"Cinnamon\":\n cinnamon_count += 1\n\n\ndata_dict = {\n \"Fur_Color\": [\"Gray\", \"Black\", \"Cinnamon\"],\n \"Count\": [gray_count, black_count, cinnamon_count],\n}\n\n\ncounting_data = pandas.DataFrame(data_dict)\ncounting_data.to_csv(\"squirrel_count_color-wise\")\n"
},
{
"alpha_fraction": 0.5549019575119019,
"alphanum_fraction": 0.5764706134796143,
"avg_line_length": 21.173913955688477,
"blob_id": "8e1befc6668fd913e09c45e7af9614547dea4b59",
"content_id": "02a30148130e49dbe0f1c04a9f1422f3f3575da5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 510,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 23,
"path": "/100DaysOfCoding/turtle-crossing-start/player.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Turtle\nSTARTING_POSITION = (0, -280)\nMOVE_DISTANCE = 10\nFINISH_LINE_Y = 280\nPLAYERS = []\n\n\nclass Player(Turtle):\n def __init__(self, position):\n super().__init__()\n self.players = []\n self.init_turtle(position)\n\n def init_turtle(self, position):\n # new_player = Turtle()\n self.shape(\"turtle\")\n self.color(\"black\")\n self.penup()\n self.setheading(90)\n self.goto(position)\n\n def move(self):\n self.fd(MOVE_DISTANCE)\n"
},
{
"alpha_fraction": 0.6024525165557861,
"alphanum_fraction": 0.6083860993385315,
"avg_line_length": 75.60606384277344,
"blob_id": "e906f674af06f2c3caaff8e9cf6c936279b9208e",
"content_id": "819cd3777a5fac80dc91bfd0d0fb7a93cb85c1a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2528,
"license_type": "no_license",
"max_line_length": 175,
"num_lines": 33,
"path": "/100DaysOfCoding/quiz-game-start/data.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "question_data = [\n {\n \"category\": \"General Knowledge\",\n \"type\": \"boolean\",\n \"difficulty\": \"easy\",\n \"question\": \"Gumbo is a stew that originated in Louisiana.\",\n \"correct_answer\": \"True\",\n \"incorrect_answers\": [\"False\"]\n },\n {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \"A scientific study on peanuts in bars found traces of over 100 unique specimens of urine.\",\n \"correct_answer\": \"False\", \"incorrect_answers\": [\"True\"]},\n {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \"Bulls are attracted to the color red.\", \"correct_answer\": \"False\", \"incorrect_answers\": [\"True\"]},\n {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \"You can legally drink alcohol while driving in Mississippi.\", \"correct_answer\": \"True\",\n \"incorrect_answers\": [\"False\"]}, {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \""27 Club" is a term used to refer to a list of famous actors, musicians, and artists who died at the age of 27.\",\n \"correct_answer\": \"True\", \"incorrect_answers\": [\"False\"]},\n {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \"Dihydrogen Monoxide was banned due to health risks after being discovered in 1983 inside swimming pools and drinking water.\",\n \"correct_answer\": \"False\", \"incorrect_answers\": [\"True\"]},\n {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \"Scotland voted to become an independent country during the referendum from September 2014.\",\n \"correct_answer\": \"False\", \"incorrect_answers\": [\"True\"]},\n {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \"The National Animal of Scotland is the 
Unicorn.\", \"correct_answer\": \"True\",\n \"incorrect_answers\": [\"False\"]}, {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \"A pasodoble is a type of Italian pasta sauce.\",\n \"correct_answer\": \"False\", \"incorrect_answers\": [\"True\"]},\n {\"category\": \"General Knowledge\", \"type\": \"boolean\", \"difficulty\": \"easy\",\n \"question\": \"The mitochondria is the powerhouse of the cell.\", \"correct_answer\": \"True\",\n \"incorrect_answers\": [\"False\"]}]\n"
},
{
"alpha_fraction": 0.6156914830207825,
"alphanum_fraction": 0.6156914830207825,
"avg_line_length": 28.959999084472656,
"blob_id": "dbf40d25e7974c810d90672571e111dd5d3f2365",
"content_id": "48c7641795a283321df26501484367f8d8ec4216",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 752,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 25,
"path": "/100DaysOfCoding/oop-coffee-machine-start/oop-coffee-machine-start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from menu import Menu, MenuItem\nfrom coffee_maker import CoffeeMaker\nfrom money_machine import MoneyMachine\nm = Menu()\ncm = CoffeeMaker()\nmm = MoneyMachine()\n\nturn_off = False\nwhile not turn_off:\n user_selection = input(f\"What would you like to have? {m.get_items()}:\")\n\n if user_selection == \"off\":\n print(\"Machine is turning OFF. BYE BYE...\")\n turn_off = True\n elif user_selection == \"report\":\n cm.report()\n mm.report()\n else:\n item = m.find_drink(user_selection)\n if item is None:\n print(\"We have only three options available.\")\n can_make = cm.is_resource_sufficient(item)\n if can_make is True:\n mm.make_payment(item.cost)\n cm.make_coffee(item)\n\n\n\n"
},
{
"alpha_fraction": 0.6017223000526428,
"alphanum_fraction": 0.6189451217651367,
"avg_line_length": 21.609756469726562,
"blob_id": "50785bfe391718ac56941c56ad684ff9c6060c20",
"content_id": "7639fc29465677f639c192f0672f72a3b3f0ec94",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 929,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 41,
"path": "/100DaysOfCoding/turtle-graphics-day16/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# from turtle import Turtle, Screen\n#\n# champu = Turtle()\n#\n# champu.shape(\"turtle\")\n#\n# champu.color(\"DarkOrange\", \"brown\")\n# champu.position()\n# champu.fd(100)\n# champu.setheading(75)\n# champu.fd(50)\n#\n#\n# turtle_screen = Screen()\n# turtle_screen.exitonclick()\n# turtle_screen.title(\"Welcome to Turtle Cage.\")\n\nfrom prettytable import PrettyTable\ntable_object = PrettyTable()\ntable_object.add_column(\"Pokemon Name\",\n [\"Pikachu\", \"Squirtle\", \"Charmandor\"])\ntable_object.add_column(\"Type\",\n [\"Electric\", \"Water\", \"Fire\"])\ntable_object.align = \"l\"\nprint(table_object)\n\n\ntable_object.clear()\n\n\ntable_object.field_names = [\"Serial Number\", \"Pokemon Name\", \"Type\"]\ntable_object.add_rows(\n [\n [\"#001\", \"Pikachu\", \"Electric\"],\n [\"#002\", \"Squirtle\", \"Water\"],\n [\"#003\", \"Charmandor\", \"Fire\"],\n ]\n)\ntable_object.align[\"Serial Number\"] = \"l\"\n\nprint(table_object)\n\n\n"
},
{
"alpha_fraction": 0.698113203048706,
"alphanum_fraction": 0.698113203048706,
"avg_line_length": 16.66666603088379,
"blob_id": "f8a8390d3cb6e6068ba7bbbb552987a92d0d358e",
"content_id": "bc465fbf1eee6cfae82eb562e0676e28ef029f7e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 106,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 6,
"path": "/100DaysOfCoding/cookie_bot/prices.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from selenium import webdriver\n\n\ndef price(list, index):\n price = list[index].strip()\n return price\n"
},
{
"alpha_fraction": 0.58638995885849,
"alphanum_fraction": 0.5950772166252136,
"avg_line_length": 32.95082092285156,
"blob_id": "80d715e21c42e11f55e1111cf5e696036814a2e0",
"content_id": "7743101b816fce133916e2cfc55f6e70072e8328",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2072,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 61,
"path": "/100DaysOfCoding/rock-paper-scissors.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import random\n\n# Pictures of ROCK, PAPER and SCISSORS.\n\nrock = '''\n _______\n---' ____)\n (_____)\n (_____)\n (____)\n---.__(___)\n'''\n\npaper = '''\n _______\n---' ____)____\n ______)\n _______)\n _______)\n---.__________)\n'''\n\nscissors = '''\n _______\n---' ____)____\n ______)\n __________)\n (____)\n---.__(___)\n'''\nlist = [rock, paper, scissors]\nrandom_selection = random.randint(0, 2)\nuser_selection = int(input(\"Please type '0' for rock, '1' for paper and '2' for scissors: \"))\nif user_selection > 2:\n print(\"You've typed an invalid number. You lose!\")\nelif random_selection == 2 and user_selection == 0:\n print(f\"Computer Selected Scissors!\\n{list[random_selection]}\\n\")\n print(f\"You've Selected Rock!\\n{list[user_selection]}\\n\")\n print(\"You Won! Hurraaaah!\")\nelif random_selection == 1 and user_selection == 0:\n print(f\"Computer Selected Paper!\\n{list[random_selection]}\\n\")\n print(f\"You've Selected Rock!\\n{list[user_selection]}\\n\")\n print(\"Computer Win! Good luck next time..\")\nelif random_selection == 0 and user_selection == 1:\n print(f\"Computer Selected Rock!\\n{list[random_selection]}\\n\")\n print(f\"You've Selected Paper!\\n{list[user_selection]}\\n\")\n print(\"You Won! Hurraaaah!\")\nelif random_selection == 2 and user_selection == 1:\n print(f\"Computer Selected Scissors!\\n{list[random_selection]}\\n\")\n print(f\"You've Selected Paper!\\n{list[user_selection]}\\n\")\n print(\"Computer Win! Good luck next time..\")\nelif random_selection == 0 and user_selection == 2:\n print(f\"Computer Selected Rock!\\n{list[random_selection]}\\n\")\n print(f\"You've Selected Scissors!\\n{list[user_selection]}\\n\")\n print(\"Computer Win! Good luck next time..\")\nelif random_selection == 1 and user_selection == 2:\n print(f\"Computer Selected Paper!\\n{list[random_selection]}\\n\")\n print(f\"You've Selected Scissors!\\n{list[user_selection]}\\n\")\n print(\"You Won! 
Hurraaaah!\")\nelse:\n print(f\"Computer Choose: {list[random_selection]}\\nYou Choose: {list[user_selection]}\\nDraw!\")\n\n"
},
{
"alpha_fraction": 0.7016393542289734,
"alphanum_fraction": 0.703278660774231,
"avg_line_length": 31.105262756347656,
"blob_id": "2880a85bcef478544ef6535c1455dbb236d1e297",
"content_id": "dde5231d578b9db291bafba9f929df367906d60d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 610,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 19,
"path": "/100DaysOfCoding/Day-48/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from selenium import webdriver\n\n\nchrome_driver_path = \"/snap/bin/chromium.chromedriver\"\nupcoming_events = {}\ndriver = webdriver.Chrome(executable_path=chrome_driver_path)\ndriver.get(\"https://www.python.org/\")\ndates_and_times = driver.find_elements_by_css_selector(\".shrubbery .menu li time\")\nlinks_texts = driver.find_elements_by_css_selector(\".event-widget .shrubbery .menu li a\")\n\nfor number in range(5):\n dictionary = {\n \"time\": dates_and_times[number].text,\n \"name\": links_texts[number].text\n }\n upcoming_events[number] = dictionary\nprint(upcoming_events)\n\ndriver.quit()\n"
},
{
"alpha_fraction": 0.7050113677978516,
"alphanum_fraction": 0.7448747158050537,
"avg_line_length": 31.44444465637207,
"blob_id": "b1c1a9364c91c623a8e41844bb90c620d8f3e218",
"content_id": "81809534874ab4b917c1595399635f47a1a262d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1756,
"license_type": "no_license",
"max_line_length": 206,
"num_lines": 54,
"path": "/100DaysOfCoding/Day-49-LinkInJob/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from selenium import webdriver\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.common.exceptions import NoSuchElementException\nimport time\n\n\nMY_EMAIL = \"[email protected]\"\nMY_PASSWORD = \"zafarsupari123\"\nMY_NUMBER = \"3218322229\"\nCHROME_PATH = \"/snap/bin/chromium.chromedriver\"\nWEBSITE = \"https://www.linkedin.com/jobs/search?keywords=Python%20Developer&location=Kar%C4%81chi%2C%20Sindh%2C%20Pakistan&locationId=&geoId=105451800&sortBy=R&f_TPR=&distance=25&f_E=2&position=1&pageNum=0\"\n\ndriver = webdriver.Chrome(CHROME_PATH)\ndriver.get(WEBSITE)\n\ntime.sleep(4)\ntry:\n sing_in = driver.find_element_by_xpath('/html/body/div[3]/a[1]')\n sing_in.click()\nexcept NoSuchElementException:\n pass\n\ntime.sleep(1)\nusername = driver.find_element_by_id(\"username\")\nusername.send_keys(MY_EMAIL)\n\npassword = driver.find_element_by_id(\"password\")\npassword.send_keys(MY_PASSWORD)\n\nlogin = driver.find_element_by_xpath('//*[@id=\"organic-div\"]/form/div[3]/button')\nlogin.click()\n\ntime.sleep(6)\n# drop_down = driver.find_element_by_xpath('//*[@id=\"ember194\"]')\n# drop_down.click()\n#\n# time.sleep(2)\ntry:\n apply_now = driver.find_element_by_css_selector(\".display-flex .jobs-s-apply .jobs-apply-button--top-card .jobs-apply-button\")\n apply_now.click()\nexcept NoSuchElementException:\n pass\n\ntime.sleep(2)\nmobile_number = driver.find_element_by_xpath('//*[@id=\"urn:li:fs_easyApplyFormElement:(urn:li:fs_normalized_jobPosting:2713739251,34180051,phoneNumber~nationalNumber)\"]')\nmobile_number.send_keys(MY_NUMBER)\n\ntime.sleep(1)\nnext_button_1 = driver.find_element_by_css_selector(\"footer .display-flex button\")\nnext_button_1.click()\n\ntime.sleep(1)\nnext_button_2 = driver.find_element_by_css_selector(\"footer .display-flex button:last-child\")\nnext_button_2.click()\n\n\n\n\n"
},
{
"alpha_fraction": 0.5194931626319885,
"alphanum_fraction": 0.6023392081260681,
"avg_line_length": 17.962963104248047,
"blob_id": "f28609297f0e5a9bbbfd3d6e6aa1b80c30354016",
"content_id": "124936ec092f0c5f34e61f5ee78b94e27f273694",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1026,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 54,
"path": "/100DaysOfCoding/hirst-painting-start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "# Code to import colors from image with the help of colorgram package.\n# import colorgram\n#\n# rgb_colors = []\n# colors = colorgram.extract('image.jpg', 10)\n# for color in colors:\n# r = color.rgb.r\n# g = color.rgb.g\n# b = color.rgb.b\n# rgb = (r, g, b)\n# rgb_colors.append(rgb)\n#\n# print(rgb_colors)\n\nimport turtle as t\nimport random\n\npinky = t.Turtle()\npinky.speed(3)\npinky.shape(\"circle\")\npinky.width(20)\npinky.penup()\npinky.hideturtle()\nt.colormode(255)\n\nrgb_colors = [(202, 164, 109), (238, 240, 245),\n (150, 75, 49), (223, 201, 135), (52, 93, 124), (172, 154, 40), (140, 30, 19)]\n\n\nposition = [-200, -200]\n\n\ndef dot_positions():\n pinky.penup()\n pinky.setpos(position[0], position[1])\n position[1] += 50\n\n\ndef movement():\n for _ in range(10):\n random_color = random.choice(rgb_colors)\n pinky.dot(20, random_color)\n pinky.fd(50)\n\n\ncount = 0\nwhile not count == 10:\n dot_positions()\n movement()\n count += 1\n\n\nscreen = t.Screen()\nscreen.exitonclick()\n\n\n"
},
{
"alpha_fraction": 0.4889543354511261,
"alphanum_fraction": 0.5095729231834412,
"avg_line_length": 21.600000381469727,
"blob_id": "544875c15630b3be43fbab772c4e3f632cb97031",
"content_id": "8521afb1fda16cd72657b1b480738ed27dc557d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 679,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 30,
"path": "/100DaysOfCoding/pong_game/ball.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from turtle import Turtle\n\n\nclass Ball(Turtle):\n def __init__(self):\n super().__init__()\n self.shape(\"circle\")\n self.color(\"white\")\n self.shapesize(stretch_wid=1, stretch_len=1)\n self.penup()\n self.x_cor = 10\n self.y_cor = 10\n self.move_speed = 0.1\n\n def move(self):\n x_cor = self.xcor() + self.x_cor\n y_cor = self.ycor() + self.y_cor\n self.goto(x_cor, y_cor)\n\n def y_bounce(self):\n self.y_cor *= -1\n\n def x_bounce(self):\n self.x_cor *= -1\n self.move_speed *= 0.9\n\n def paddle_misses(self):\n self.home()\n self.move_speed = 0.1\n self.x_bounce()\n\n"
},
{
"alpha_fraction": 0.6173084378242493,
"alphanum_fraction": 0.6300119161605835,
"avg_line_length": 47.44230651855469,
"blob_id": "adb943794d807e9c5282d5c30944417db5f3d7bc",
"content_id": "2b4b5bc98ef4bae139eb7e1dd51f2529c22a663d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2519,
"license_type": "no_license",
"max_line_length": 201,
"num_lines": 52,
"path": "/100DaysOfCoding/Day-51-Twitter-Complaint-Bot/internet_speed_bot.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "from selenium import webdriver\nimport time\nfrom selenium.common.exceptions import NoSuchElementException I\nimport datetime as dt\n\n\nclass InternetSpeedTwitterBot:\n def __init__(self, chrome_path):\n self.driver = webdriver.Chrome(chrome_path)\n self.up = 0\n self.down = 0\n self.time = self.time_now()\n\n def get_internet_speed(self):\n self.driver.get(\"https://www.speedtest.net/\")\n test_speed = self.driver.find_element_by_css_selector(\".start-button a\")\n test_speed.click()\n time.sleep(50)\n try:\n self.down = float(self.driver.find_element_by_css_selector(\".result-item .result-data .download-speed\").text)\n self.up = float(self.driver.find_element_by_css_selector(\".result-item .result-data .upload-speed\").text)\n except NoSuchElementException:\n print(\"Speed Not Found\")\n\n def tweet_at_provider(self, email, password):\n self.driver.get(\"https://twitter.com/\")\n try:\n time.sleep(2)\n sign_in = self.driver.find_element_by_xpath('//*[@id=\"react-root\"]/div/div/div/main/div/div/div/div[1]/div/div[3]/div[4]/span')\n sign_in.click()\n time.sleep(2)\n login = self.driver.find_element_by_xpath('//*[@id=\"react-root\"]/div/div/div/main/div/div/div/div[1]/div/div[3]/a')\n login.click()\n time.sleep(1)\n except NoSuchElementException:\n print(\"Page Changed.\")\n time.sleep(8)\n username = self.driver.find_element_by_name(\"session[username_or_email]\")\n username.send_keys(email)\n passwords = self.driver.find_element_by_name(\"session[password]\")\n passwords.send_keys(password)\n login_in_2 = self.driver.find_element_by_xpath('//*[@id=\"react-root\"]/div/div/div[2]/main/div/div/div[2]/form/div/div[3]/div')\n login_in_2.click()\n time.sleep(4)\n content = self.driver.find_element_by_css_selector(\".DraftEditor-root .DraftEditor-editorContainer .public-DraftStyleDefault-block\")\n content.send_keys(f\"Hey Internet Provider, Why is my internet Speed is {self.down}down/{self.up}up when I pay for 150down/10up?\\n{self.time}\")\n tweet = 
self.driver.find_element_by_xpath('//*[@id=\"react-root\"]/div/div/div[2]/main/div/div/div/div/div/div[2]/div/div[2]/div[1]/div/div/div/div[2]/div[3]/div/div/div[2]/div[3]/div/span/span')\n tweet.click()\n\n def time_now(self):\n now = dt.datetime.now()\n return now.strftime(\"%Y-%m-%d %H:%M\")\n"
},
{
"alpha_fraction": 0.7102272510528564,
"alphanum_fraction": 0.7170454263687134,
"avg_line_length": 43.04999923706055,
"blob_id": "c5a258787f26358f3c0d4a131bcd9beb3736e2d8",
"content_id": "5ce92cad30ce2a885c40c95adbdc713976d6539f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 880,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 20,
"path": "/100DaysOfCoding/Mail+Merge+Project+Start/Mail Merge Project Start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "#TODO: Create a letter using starting_letter.txt\n# print(change_name)\nwith open(\"./Input/Names/invited_names.txt\") as all_names:\n names = all_names.readlines()\n\nwith open(\"./Input/Letters/starting_letter.txt\") as letter:\n letter_blueprint = letter.read()\n\n#for each name in invited_names.txt\nfor name in names:\n stripped_name = name.strip()\n new_letter = letter_blueprint.replace(\"[name]\", stripped_name)\n # Save the letters in the folder \"ReadyToSend\".\n with open(f\"./Output/ReadyToSend/letter_for_{name}.txt\", \"w\") as final_letter:\n final_letter.write(new_letter)\n\n\n#Hint1: This method will help you: https://www.w3schools.com/python/ref_file_readlines.asp\n #Hint2: This method will also help you: https://www.w3schools.com/python/ref_string_replace.asp\n #Hint3: THis method will help you: https://www.w3schools.com/python/ref_string_strip.asp"
},
{
"alpha_fraction": 0.6508264541625977,
"alphanum_fraction": 0.6528925895690918,
"avg_line_length": 30.225807189941406,
"blob_id": "e0e12476ab0e2cd9a00070d5592c55f3d31b151e",
"content_id": "33ca78d009387002494ccfb02b8f50c7b64ae844",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 968,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 31,
"path": "/100DaysOfCoding/birthday-wisher-extrahard-start/main.py",
"repo_name": "moazzam3890/100DaysOfCode-Python",
"src_encoding": "UTF-8",
"text": "import datetime as dt\nimport smtplib\nimport pandas\nimport random\n\nemail = \"[email protected]\"\npassword = \"abcxyz123789\"\ntoday = dt.datetime.now()\ntoday_tuple = (today.month, today.day)\n\ndata = pandas.read_csv(\"birthdays.csv\")\n\n# Dictionary Comprehension:\nbirthday_dict = {(row_of_data[\"month\"], row_of_data[\"day\"]): row_of_data for (index, row_of_data)\n in data.iterrows()}\n\nif today_tuple in birthday_dict:\n birthday_person = birthday_dict[today_tuple]\n letter_path = f\"letter_templates/letter_{random.randint(1,3)}.txt\"\n with open(letter_path) as letter:\n content = letter.read()\n replaced_content = content.replace(\"[NAME]\", birthday_person[\"name\"])\n\n with smtplib.SMTP(\"smtp.gmail.com\") as connection:\n connection.starttls()\n connection.login(email, password)\n connection.sendmail(\n from_addr=email,\n to_addrs=email,\n msg=f\"Subject:Happy Birthday\\n\\n{replaced_content}\"\n )\n"
}
] | 47 |
mappls/mappls.github.io | https://github.com/mappls/mappls.github.io | be28849986345b743480cd01efa37d4095d1a49a | 8b18ca405d50fe7eec4acfc3b1409c9bcd8219a0 | a3f91ee8889223dfbcb5e428852f60beeef834fc | refs/heads/master | 2020-05-01T16:25:57.391662 | 2019-05-09T14:57:01 | 2019-05-09T14:57:01 | 177,571,727 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7588235139846802,
"alphanum_fraction": 0.7588235139846802,
"avg_line_length": 340,
"blob_id": "f22da9cb6d9632a0c7d4d384406c0f62f6589310",
"content_id": "b19f4d47e8981cf039adc0e370de8cb62905b574",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 340,
"license_type": "no_license",
"max_line_length": 340,
"num_lines": 1,
"path": "/app/static/projects/trendmatch recommender-short.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "<a target=\"_blank\" href=\"https://trendmatch.dk/\">Trendmatch</a> is an innovative Danish startup in the fashion business, connecting users to fashion products through an iOS app. It's my honour to be part of this great <a target=\"_blank\" href=\"https://trendmatch.dk/om-os/\">team</a>, and develop the Machine Learning capabilities of the app."
},
{
"alpha_fraction": 0.7784430980682373,
"alphanum_fraction": 0.7934131622314453,
"avg_line_length": 334,
"blob_id": "3f9ee42e3914caef80973938d61ac5c9de3e284f",
"content_id": "22d0a3572b49f23d2d48e72c1e97897df61c22f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 334,
"license_type": "no_license",
"max_line_length": 334,
"num_lines": 1,
"path": "/app/static/projects/trenddays-short.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "A project for a US client on predicting a special \"day type\" in the stock market, as early as possible in the day. The main challenge was model optimization: pushing the limits of what a neural network can learn from small amount of data. We managed to achieve 74% precision and 53% recall, 3 hours after stock exchange opening hours."
},
{
"alpha_fraction": 0.7739872336387634,
"alphanum_fraction": 0.7793176770210266,
"avg_line_length": 103.13888549804688,
"blob_id": "cdb58d0b2f0e5055384319ddd885c6456bdb5139",
"content_id": "0a5dfc1d6be9841ff1f2897c70a90de52109ddc9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3752,
"license_type": "no_license",
"max_line_length": 798,
"num_lines": 36,
"path": "/app/static/projects/learnbet-long.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "An end-to-end Machine Learning project, for forecasting outcomes of football matches. It includes the stages of: data collection, cleaning, storage, processing, predictive modeling, model evaluation, statistical analysis and production deployment. \n\nThis page describes an older version of the project, while a new one is currently in development.\n\n<hr>\n\nI initially started this project in 2016 with the aim to learn Machine Learning, and all the other steps that come before and after it. Therefore, most of the code is built \"from scratch\" i.e., I didn't use any ML frameworks, or data preprocessing, or statistical packages, but tried to code the needed bits in Python. For example, I created this small <a target=\"_blank\" href=\"https://github.com/Misko07/NeuralNet\">NeuralNet</a> project to train a neural network. The forward and back-propagation functions, gradient descent and the rest are coded using the instructions in the famous <a target=\"_blank\" href=\"https://www.coursera.org/learn/machine-learning\">Machine learning course</a> by Andrew Ng. Of course, results were really sub-optimal, but the efforts paid off because of the things learned!\n\n## Data ingestion and storage\n\nUsing available datasets on the Internet is not fun. I started scraping football data off a few websites back in 2016, and by now there's quite a lot. It's mainly data on \"matches\" - goals scored / allowed, statistics, and different betting odds. \n\nI use Python for the web scraping with a headless Chrome browser, and BeautifulSoup for processing the HTML. Scripts scraping different websites are started as separate processes, which get activated in every ~1 hour. This is active 24/7. As new matches arrive, data is initially cleaned up, and saved in MongoDB.\n\n## Data preprocessing\n\nIn this old Learnbet version there was no data pipe to extract transform and load from MongoDB to the ML model, but it would've been handy. 
For each separate model, I had to manually create a new Python script to gather data from the DB, apply the needed functions, and output a matrix-like data structure for the model's inputs and targets. \n\n## Modeling\n\nThe modeling part was basically done from scratch, including the neural networks and model evaluation methods. One of the first models I used was predicting a 1 / x / 2 outcome (class) of a match. Then different classes had different accuracy / precision / recall. The following figure shows the distribution of match outcomes and predictions, where predictions around 0 are \"home win\", 0.5 - \"draw\", and 1 - \"away win\". \n\n<img class=\"intext-img\" src=\"../static/first_one_hist.png\">\n\nLater on, after being comfortable with using my neural networks, I continued experimenting with models of the `scikit-learn` library, such as: support vector machines (SVMs), logistic regression, decision trees and random forests.\n\n## Deploying in production\nThe first few models were built to work as \"console\" apps. You pass the names of the two playing teams (and an optional date), and get as output the prediction. Later on, I setup a small Flask app access predictions (and other data) on the web, as in the next figure. The web app was first set on GCP, but later on switched to AWS. \n\n<img class=\"intext-img\" src=\"../static/odds_web.png\">\n\n\n## Reporting, logging, statistics\nAs my first end-to-end ML project, I had to monitor the app for things like scrapping crashes and data inconsistencies. Here the `logging` library of Python came very useful, as all important events across the pipeline were gathered in a single text-based log file. \n\nMonitoring performance is always a good idea. In Learnbet I automated the creation of a few separate reports: model stats (all-time or last `n` matches), league stats, and team stats.\n\n \n"
},
{
"alpha_fraction": 0.40410518646240234,
"alphanum_fraction": 0.7297947406768799,
"avg_line_length": 221.67857360839844,
"blob_id": "2167ca85653d4917507d2f8a3c80fa8058f4ae20",
"content_id": "c6f219d3a87589b0394d231617ed7cea32527beb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 6236,
"license_type": "no_license",
"max_line_length": 3978,
"num_lines": 28,
"path": "/app/static/projects/trenddays-long.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "TrendDays is a project I did for a client trading in the stock market. It's a pure ML project where, given a stock, we were looking to predict a certain type of day, called \"Trend\" day. That is, how the rest of the day will develop, given its start in the morning. The earlier in the day we have a good prediction, the better.\n\n## The data\n\nThe client had collected the needed data over a couple of years long period. Still, some features of the data were collected (and calculated) in a different way throughout the years, so we needed to run a couple of experiments on which parts of the data have biggest predictive power. At the end, we were left with just around 700 sample days, or roughly 3 years data. \n\n## Modeling & evaluation\n\nWe setup the problem as a binary classification one, where Trend days occupied around 10% of the total observed days. With our targets being skewed, we chose evaluation through precision / recall metrics, and the F1-score, as the harmonic mean of the two.\n\nAfter some initial tests with a few quick models from the `scikit-learn` library, we decided to go with two types of neural networks:\n\n- a standard feed-forward one (call it Dense),\n- a Long-Short Term Memory (LSTM) network.\n\nThe two networks were built using `Keras`, and all evaluations were conducted using repeated k-fold cross-validation, due to the lack of data.\n\n## Results\n\nAfter optimising the parameters of the Dense and LSTM models, it turned out that the LSTM one has better performance. Finally it was in the client's interest to maximise the recall metric, at the expense of a smaller precision. That is, to get a more \"risky\" model. This was achieved through adjusting the decision threshold, and provided satisfying results. 
\n\n<div><script type=\"text/javascript\">window.PlotlyConfig = {MathJaxConfig: 'local'};</script><script src=\"https://cdn.plot.ly/plotly-latest.min.js\"></script><div id=\"18d7f961-d11a-4f4f-8528-ee9d383bc946\" style=\"height: 100%; width: 100%;\" class=\"plotly-graph-div\"></div><script type=\"text/javascript\">window.PLOTLYENV=window.PLOTLYENV || {};window.PLOTLYENV.BASE_URL=\"https://plot.ly\";Plotly.newPlot(\"18d7f961-d11a-4f4f-8528-ee9d383bc946\", [{\"mode\": \"lines+markers\", \"name\": \"precision\", \"x\": [\"09:15:00\", \"09:30:00\", \"09:45:00\", \"10:00:00\", \"10:15:00\", \"10:30:00\", \"10:45:00\", \"11:00:00\", \"11:15:00\", \"11:30:00\", \"11:45:00\", \"12:00:00\", \"12:15:00\", \"12:30:00\", \"12:45:00\", \"13:00:00\", \"13:15:00\", \"13:30:00\", \"13:45:00\", \"14:00:00\", \"14:15:00\", \"14:30:00\", \"14:45:00\", \"15:00:00\", \"15:15:00\", \"15:30:00\", \"15:45:00\", \"16:00:00\"], \"y\": [0.2196969696969697, 0.2569444444444444, 0.27586206896551724, 0.3161290322580645, 0.345, 0.34104046242774566, 0.4025157232704403, 0.32456140350877194, 0.43455497382198954, 0.52, 0.506578947368421, 0.5405405405405406, 0.5317919075144508, 0.5251396648044693, 0.5654761904761905, 0.6381578947368421, 0.5950920245398773, 0.6, 0.6206896551724138, 0.6449704142011834, 0.63125, 0.6282051282051282, 0.6405228758169934, 0.6551724137931034, 0.618421052631579, 0.6329113924050633, 0.6598639455782312, 0.610062893081761], \"type\": \"scatter\", \"uid\": \"386db4bf-fcbf-4864-82ab-a74324c94eed\"}, {\"mode\": \"lines+markers\", \"name\": \"recall\", \"x\": [\"09:15:00\", \"09:30:00\", \"09:45:00\", \"10:00:00\", \"10:15:00\", \"10:30:00\", \"10:45:00\", \"11:00:00\", \"11:15:00\", \"11:30:00\", \"11:45:00\", \"12:00:00\", \"12:15:00\", \"12:30:00\", \"12:45:00\", \"13:00:00\", \"13:15:00\", \"13:30:00\", \"13:45:00\", \"14:00:00\", \"14:15:00\", \"14:30:00\", \"14:45:00\", \"15:00:00\", \"15:15:00\", \"15:30:00\", \"15:45:00\", \"16:00:00\"], \"y\": [0.23387096774193547, 
0.29838709677419356, 0.3225806451612903, 0.3951612903225806, 0.5564516129032258, 0.47580645161290325, 0.5161290322580645, 0.5967741935483871, 0.6693548387096774, 0.5241935483870968, 0.6209677419354839, 0.6451612903225806, 0.7419354838709677, 0.7580645161290323, 0.7661290322580645, 0.782258064516129, 0.782258064516129, 0.7983870967741935, 0.8709677419354839, 0.8790322580645161, 0.8145161290322581, 0.7903225806451613, 0.7903225806451613, 0.7661290322580645, 0.7580645161290323, 0.8064516129032258, 0.782258064516129, 0.782258064516129], \"type\": \"scatter\", \"uid\": \"a52f3247-e5d1-4461-8d0b-7b51acd25dba\"}, {\"mode\": \"lines+markers\", \"name\": \"f1\", \"x\": [\"09:15:00\", \"09:30:00\", \"09:45:00\", \"10:00:00\", \"10:15:00\", \"10:30:00\", \"10:45:00\", \"11:00:00\", \"11:15:00\", \"11:30:00\", \"11:45:00\", \"12:00:00\", \"12:15:00\", \"12:30:00\", \"12:45:00\", \"13:00:00\", \"13:15:00\", \"13:30:00\", \"13:45:00\", \"14:00:00\", \"14:15:00\", \"14:30:00\", \"14:45:00\", \"15:00:00\", \"15:15:00\", \"15:30:00\", \"15:45:00\", \"16:00:00\"], \"y\": [0.22651255983863902, 0.2760696903954044, 0.2973480825415976, 0.35120510451240783, 0.4258786822717823, 0.3972577642323899, 0.45224758992114134, 0.4204089150766008, 0.5269363933389183, 0.5220383590079418, 0.5579215334789055, 0.5881856875747317, 0.6194799843191933, 0.6204136974144098, 0.6506360704736117, 0.702849068806033, 0.6759091150004224, 0.6850721171035058, 0.7247836256167466, 0.7439784863538842, 0.7112184124483929, 0.6999506565397227, 0.7075317789254042, 0.7062700108206044, 0.6811099384824217, 0.7091705884036595, 0.7158175222665353, 0.6854631358012988], \"type\": \"scatter\", \"uid\": \"d96095a5-49e7-4ac1-9efc-9f981121a473\"}], {\"title\": {\"text\": \"Precision, recall, F1 score for model 'optimised_02'\"}, \"xaxis\": {\"ticklen\": 5, \"title\": {\"text\": \"Prediction time in the day\"}, \"zeroline\": false}, \"yaxis\": {\"ticklen\": 5, \"title\": {\"text\": \"Percentage\"}, \"zeroline\": false}}, 
{\"showLink\": false, \"linkText\": \"Export to plot.ly\", \"plotlyServerURL\": \"https://plot.ly\"})</script><script type=\"text/javascript\">window.addEventListener(\"resize\", function(){Plotly.Plots.resize(document.getElementById(\"18d7f961-d11a-4f4f-8528-ee9d383bc946\"));});</script></div>\n\nIn the end we wanted to look at the model's performance in different times in the day. As expected, the model's performance increases as more data is available during the day, reaching an estimated 60% recall at 11am, and 75% at 12:15am. \n\n## Way forward\n\nThe next step for this project is using Ensambling techniques to further increase the performance, such as Boosting and Bagging. These tools should take as input multiple neural nets and decision trees. Once this is done, we can take the project to production.\n\n"
},
{
"alpha_fraction": 0.7575757503509521,
"alphanum_fraction": 0.7696969509124756,
"avg_line_length": 164,
"blob_id": "cc8622913d587e90cc1a1890dfc94c70a3fe7a29",
"content_id": "323f43c4866d83d822135b4f3ca848e84544ce8b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 165,
"license_type": "no_license",
"max_line_length": 164,
"num_lines": 1,
"path": "/static/projects/dl-group-short.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "A set of 20 small Machine Learning projects built on top of exercises in the <a target=\"_blank\" href=\"https://www.deeplearning.ai\">Deep Learning Specialization</a>.\n"
},
{
"alpha_fraction": 0.714061439037323,
"alphanum_fraction": 0.7696793079376221,
"avg_line_length": 72.11475372314453,
"blob_id": "a6849bac20595f7a3f596bfd1526548382d0e1c8",
"content_id": "59630093057288fb08176316a13e69d02ff5993e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4459,
"license_type": "no_license",
"max_line_length": 231,
"num_lines": 61,
"path": "/static/research/research.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "Papers also available on <a target=\"_blank\" href=\"https://scholar.google.com/citations?hl=en&user=nKImWsoAAAAJ\">Google Scholar</a> & <a target=\"_blank\" href=\"https://www.researchgate.net/profile/Mihajlo_Pavloski\">Research Gate</a>.\n\n## Mathematical modeling, network security\n\nRelated projects: <a target=\"_blank\" href=\"http://www.nemesys-project.eu/nemesys\">EU FP7 NEMESYS</a>, <a target=\"_blank\" href=\"https://konfido-project.eu/\">EU H2020 KONFIDO</a>.\n\n- <a target=\"_blank\" href=\"https://link.springer.com/chapter/10.1007/978-3-319-95189-8_12\"> Signalling attacks in mobile telephony </a>.\n_IEEE ISCIS Security Workshop_, February 2018.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/7774598\">A performance approach to mobile security</a>.\n_IEEE 24th International Symposium on Modeling, Analysis and Simulation of Computer and Telecommunication Systems_, Sept 2016\n\n- <a target=\"_blank\" href=\"https://link.springer.com/chapter/10.1007/978-3-319-22635-4_9\"> Bandwidth usage - based detection of signaling attacks</a>. \n_30th International Symposium on Computer and Information Sciences_, August 2015.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/7004004\"> Modeling and analysis of RRC-based signaling storms in 3G networks</a>.\n_IEEE Transactions on Emerging Topics in Computing_, Jan 2016.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/7518065\"> Counter-based detection and mitigation of signaling attacks</a>. \n_12th International Joint Conference on e-Business and Telecommunications_, July 2015.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/7509491\">Signaling attacks in mobile telephony</a>. \n_11th International Conference on Security and Cryptography_, August 2014. 
\n\n## Cognitive radio\n\nRelated projects:<a target=\"_blank\" href=\"https://cordis.europa.eu/project/rcn/93764/factsheet/en\">EU FP7 FARAMIR<a>, <a target=\"_blank\" href=\"https://cordis.europa.eu/project/rcn/93812/factsheet/en\"> EU FP7 QUASAR</a>.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/abstract/document/6328334\">Multilevel modeling of spectrum use</a>. \n_9th International Symposium on Wireless Communication Systems_, August 2012.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/6214480\"> Integration of heterogeneous spectrum sensing devices towards accurate REM construction</a>. \n_IEEE Wireless Communications and Networking Conference_, April 2012\n\n- <a target=\"_blank\" href=\"https://www.researchgate.net/publication/303696936_Estimation_Of_Decision_Threshold_In_Energy_Based_Spectrum_Sensing\">Estimation of decision threshold in energy based spectrum sensing</a>. \n_10th International Conference ETAI_, September 2011.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/5936253\">Experimental spectrum sensor testbed for constructing indoor radio environmental maps</a>. \n_IEEE International Symposium on New Frontiers in Dynamic Spectrum Access Networks_, May 2011.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/5936266\">Constructing radio environment maps with heterogeneous spectrum sensors</a>. 
**Awarded best demonstration**.\n_IEEE International Symposium on New Frontiers in Dynamic Spectrum Access Networks_, May 2011.\n\n- <a target=\"_blank\" href=\"http://journal.mta.ro/index.php?m=volumes&id_volum=15&id_articol=127\"> Efficient spectrum utilization: a cognitive approach</a>.\n_MTA Review_, 2011.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/5702772\"> Parameter settings for 2.4GHz ISM spectrum measurements</a>.\n_3rd International Workshop on Cognitive Radio and Advanced Spectrum Management_, November 2010.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/5509054\">Efficient spectrum utilization: a cognitive approach</a>. \n_8th International Conference on Communications_, June 2010. \n\n## Sensor networks, IoT\n\nRelated project: <a target=\"_blank\" href=\"https://cordis.europa.eu/project/rcn/87580/factsheet/en\"> EU FP7 PROSENSE</a>.\n\n- <a target=\"_blank\" href=\"https://ieeexplore.ieee.org/document/5967066\"> RFID and sensors enabled in-Home elderly care</a>. \n_34th International Convention on Information and Communication Technology, Electronics and Microelectronics_, August 2011.\n\n- <a target=\"_blank\" href=\"https://www.researchgate.net/publication/235695474_Sensors_and_RFID_Enabling_Smart_Space\"> Sensors and RFID enabling smart space</a>.\n_17th Telecommunications Forum_, November 2009."
},
{
"alpha_fraction": 0.7662278413772583,
"alphanum_fraction": 0.7666817903518677,
"avg_line_length": 75,
"blob_id": "d6cc96751ffc91877fec895159fe0251f768bdc8",
"content_id": "bc63051faf34b923443c0713c9ccd70af4bd5228",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2203,
"license_type": "no_license",
"max_line_length": 534,
"num_lines": 29,
"path": "/static/projects/nlp-chunks-long.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "NLP-chunks is a small project built for fun. I mainly used it to try out different machine learning ideas in the field of Natural Language Processing. Some of the ideas include: word embeddings of different dimensions, one-hot encodings, neural network depth, one-directional and bi-directional Long Short Term Memory (LSTM) networks. \n\nThe project is built of two parts:\n\n- **Name Entity Recognition (NER)**. Here the goal is - in a given text, attach a label to each word, where labels belong to: Person, Organisation, Location, Miscellaneous and Other.\n- **Chunking of phrases**. Similar to NER, to each word in a given text snippet, we need to attach one of the labels: B - beginning of a chunk, I - continuation of a chunk, O - other (not part of any chunk).\n\n## Data and Evaluation\n\nI only had a small amount of data to work with, thanks to the team at <a href=\"https://util.co/\" target=\"_blank\">Util.co</a>, so it was important to properly evaluate the model. This was done with repeated K-fold cross-validation, and I looked at metrics such as precision, recall, F1-score and support. After running all the experiments, it turned out that the best performance was reached by a bi-directional two-layer LSTM network. The following figure shows the performance of the final model for the Name Entity Recognition part.\n\n<img class=\"intext-img\" src=\"../static/NER_performance.png\">\n\nThe approach taken in the Prhases Chunking part was quite similar, and its performance is summarised in the following figure.\n\n<img class=\"intext-img\" src=\"../static/Chunking_performance.png\">\n\n## Examples\n\nAt the moment the both models for NER and Chunking work with running the appropriate Python script in the terminal. 
Here are a few examples of their work on real text.\n\n<img class=\"intext-img\" src=\"../static/NER_example.png\"/>\n\n<img class=\"intext-img\" src=\"../static/Chunking_example.png\"/>\n\n\n## Way forward\n\nThe main challenge in the work done here was optimising the performance of the model using small amount of data. Since we've learned which things work best in this case, the next step would be to train a model on a bigger dataset, and possibly build a small web app to use the tools on user's inputs."
},
{
"alpha_fraction": 0.7858672142028809,
"alphanum_fraction": 0.7858672142028809,
"avg_line_length": 467,
"blob_id": "ecc8d7e03b9146fdb2ab89829ba7c945f5989e92",
"content_id": "e7b641281d04d49b0198321224eafbeeacc77c1e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 467,
"license_type": "no_license",
"max_line_length": 467,
"num_lines": 1,
"path": "/app/static/projects/chatbot-sam-short.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "This is the conversational chatbot part of Upskill.ai - an Australian startup company (now <a target=\"_blank\" href=\"https://blaze.online/blog/hello-blaze-online/\">Blaze Online</a>). Chatbot-Sam can identify the intent in the user's question, and direct it to the sub-module which is most likely to have a correct response. The challenge in the project was training on large text datasets, and figuring out the system design in an area which is still in open research."
},
{
"alpha_fraction": 0.7905237078666687,
"alphanum_fraction": 0.7905237078666687,
"avg_line_length": 401,
"blob_id": "72e4db56d254c8f0605e352472893a64dbd6ad0d",
"content_id": "b4ae4f9fce578f8c0394e4a93d7770d25a838296",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 401,
"license_type": "no_license",
"max_line_length": 401,
"num_lines": 1,
"path": "/app/static/projects/learnbet-short.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "An end-to-end Machine Learning project I worked on for fun, and to learn the basics of ML. It includes the stages of: data collection, cleaning, storage, processing, predictive modeling, model evaluation, statistical analysis and production deployment. Most of the tools were built from scratch, such as this tiny <a target=\"_blank\" href=\"https://github.com/mappls/NeuralNet\">neural network</a> class."
},
{
"alpha_fraction": 0.6652767062187195,
"alphanum_fraction": 0.6732373237609863,
"avg_line_length": 64.95833587646484,
"blob_id": "2a92c70be0d86ca98ad60af6d12fe5ed0e9ea347",
"content_id": "4119c5466b18c925ae8d04db54d9b334f34ce6de",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 7914,
"license_type": "no_license",
"max_line_length": 805,
"num_lines": 120,
"path": "/project/learnbet.html",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "<!doctype html>\n<script>\n function hover(element, imgpath) {\n element.setAttribute('src', imgpath);\n}\n</script>\n<script>\nfunction unhover(element, imgpath) {\n element.setAttribute('src', imgpath);\n}\n</script>\n<!-- Global site tag (gtag.js) - Google Analytics -->\n<script async src=\"https://www.googletagmanager.com/gtag/js?id=UA-137797954-1\"></script>\n<script>\n window.dataLayer = window.dataLayer || [];\n function gtag(){dataLayer.push(arguments);}\n gtag('js', new Date());\n\n gtag('config', 'UA-137797954-1');\n</script>\n<title>Project: learnbet - mappls</title>\n<link rel=\"stylesheet\" href=\"/static/style.css\">\n<div class=\"headerline\">\n <a href=\"/projects\">\n <img id=\"header-img\" src=\"/static/my_img1.jpg\">\n </a>\n <h1 id=\"header-text\">mappls.github.io</h1>\n <div class=\"github\">\n <a href=\"https://github.com/mappls\">\n <img class=\"github-img\" src=\"/static/github-logo-off.png\"\n onmouseover=\"hover(this, '../static/github-logo-on-smaller.png');\"\n onmouseout=\"unhover(this, '../static/github-logo-off.png');\" alt=\"Github\" title=\"Github\">\n </a>\n <a href=\"https://www.linkedin.com/in/mp3213/\">\n <img class=\"github-img\" src=\"/static/linkedin-logo-off.png\"\n onmouseover=\"hover(this, '../static/linkedin-logo-on-smaller.png');\"\n onmouseout=\"unhover(this, '../static/linkedin-logo-off.png');\" alt=\"LinkedIn\" title=\"LinkedIn\">\n </a>\n <a href=\"https://scholar.google.com/citations?user=nKImWsoAAAAJ&hl=en&oi=ao\">\n <img class=\"github-img\" src=\"/static/scholar-logo-off.png\"\n onmouseover=\"hover(this, '../static/scholar-logo-on-smaller.png');\"\n onmouseout=\"unhover(this, '../static/scholar-logo-off.png');\"\n alt=\"Google Scholar\" title=\"Google Scholar\">\n </a>\n <a href=\"mailto:[email protected]\">\n <img class=\"github-img\" src=\"/static/email-logo-off.png\"\n onmouseover=\"hover(this, '../static/email-logo-on-smaller.png');\"\n onmouseout=\"unhover(this, '../static/email-logo-off.png');\" 
alt=\"Email\" title=\"Email\">\n </a>\n </div>\n</div>\n<hr>\n<div class=\"nav-container\">\n<ul class=\"nav-ul\">\n <li><a href=\"/projects\">Projects</a></li>\n <li><a href=\"/research\">Research</a></li>\n <li><a href=\"/bio\">Bio</a></li>\n</ul>\n</div>\n<section class=\"content\">\n <header>\n \n<h1>Project: learnbet</h1>\n\n </header>\n \n<article class=\"post\">\n <header>\n <div class=\"special\">\n <p>An end-to-end Machine Learning project, for forecasting outcomes of football matches. It includes the stages of: data collection, cleaning, storage, processing, predictive modeling, model evaluation, statistical analysis and production deployment. </p>\n<p>This page describes an older version of the project, while a new one is currently in development.</p>\n<hr>\n\n<p>I initially started this project in 2016 with the aim to learn Machine Learning, and all the other steps that come before and after it. Therefore, most of the code is built \"from scratch\" i.e., I didn't use any ML frameworks, or data preprocessing, or statistical packages, but tried to code the needed bits in Python. For example, I created this small <a target=\"_blank\" href=\"https://github.com/Misko07/NeuralNet\">NeuralNet</a> project to train a neural network. The forward and back-propagation functions, gradient descent and the rest are coded using the instructions in the famous <a target=\"_blank\" href=\"https://www.coursera.org/learn/machine-learning\">Machine learning course</a> by Andrew Ng. Of course, results were really sub-optimal, but the efforts paid off because of the things learned!</p>\n<h2>Data ingestion and storage</h2>\n<p>Using available datasets on the Internet is not fun. I started scraping football data off a few websites back in 2016, and by now there's quite a lot. It's mainly data on \"matches\" - goals scored / allowed, statistics, and different betting odds. 
</p>\n<p>I use Python for the web scraping with a headless Chrome browser, and BeautifulSoup for processing the HTML. Scripts scraping different websites are started as separate processes, which get activated in every ~1 hour. This is active 24/7. As new matches arrive, data is initially cleaned up, and saved in MongoDB.</p>\n<h2>Data preprocessing</h2>\n<p>In this old Learnbet version there was no data pipe to extract transform and load from MongoDB to the ML model, but it would've been handy. For each separate model, I had to manually create a new Python script to gather data from the DB, apply the needed functions, and output a matrix-like data structure for the model's inputs and targets. </p>\n<h2>Modeling</h2>\n<p>The modeling part was basically done from scratch, including the neural networks and model evaluation methods. One of the first models I used was predicting a 1 / x / 2 outcome (class) of a match. Then different classes had different accuracy / precision / recall. The following figure shows the distribution of match outcomes and predictions, where predictions around 0 are \"home win\", 0.5 - \"draw\", and 1 - \"away win\". </p>\n<p><img class=\"intext-img\" src=\"../static/first_one_hist.png\"></p>\n<p>Later on, after being comfortable with using my neural networks, I continued experimenting with models of the <code>scikit-learn</code> library, such as: support vector machines (SVMs), logistic regression, decision trees and random forests.</p>\n<h2>Deploying in production</h2>\n<p>The first few models were built to work as \"console\" apps. You pass the names of the two playing teams (and an optional date), and get as output the prediction. Later on, I setup a small Flask app access predictions (and other data) on the web, as in the next figure. The web app was first set on GCP, but later on switched to AWS. 
</p>\n<p><img class=\"intext-img\" src=\"../static/odds_web.png\"></p>\n<h2>Reporting, logging, statistics</h2>\n<p>As my first end-to-end ML project, I had to monitor the app for things like scrapping crashes and data inconsistencies. Here the <code>logging</code> library of Python came very useful, as all important events across the pipeline were gathered in a single text-based log file. </p>\n<p>Monitoring performance is always a good idea. In Learnbet I automated the creation of a few separate reports: model stats (all-time or last <code>n</code> matches), league stats, and team stats.</p>\n </div>\n </header>\n</article>\n\n</section>\n<hr>\n<div class=\"footer\">\n <div class=\"github\">\n <a href=\"https://github.com/mappls\">\n <img class=\"github-img\" src=\"/static/github-logo-off.png\"\n onmouseover=\"hover(this, '../static/github-logo-on-smaller.png');\"\n onmouseout=\"unhover(this, '../static/github-logo-off.png');\" alt=\"Github\" title=\"Github\">\n </a>\n <a href=\"https://www.linkedin.com/in/mp3213/\">\n <img class=\"github-img\" src=\"/static/linkedin-logo-off.png\"\n onmouseover=\"hover(this, '../static/linkedin-logo-on-smaller.png');\"\n onmouseout=\"unhover(this, '../static/linkedin-logo-off.png');\" alt=\"LinkedIn\" title=\"LinkedIn\">\n </a>\n <a href=\"https://scholar.google.com/citations?user=nKImWsoAAAAJ&hl=en&oi=ao\">\n <img class=\"github-img\" src=\"/static/scholar-logo-off.png\"\n onmouseover=\"hover(this, '../static/scholar-logo-on-smaller.png');\"\n onmouseout=\"unhover(this, '../static/scholar-logo-off.png');\"\n alt=\"Google Scholar\" title=\"Google Scholar\">\n </a>\n <a href=\"mailto:[email protected]\">\n <img class=\"github-img\" src=\"/static/email-logo-off.png\"\n onmouseover=\"hover(this, '../static/email-logo-on-smaller.png');\"\n onmouseout=\"unhover(this, '../static/email-logo-off.png');\" alt=\"Email\" title=\"Email\">\n </a>\n </div>\n</div>"
},
{
"alpha_fraction": 0.7994670271873474,
"alphanum_fraction": 0.8007994890213013,
"avg_line_length": 33.09090805053711,
"blob_id": "7347899c826c23d2d86580038eb9d44e874bdcb4",
"content_id": "a721149db4183f91a19b42c8739cc61e8233d27f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1501,
"license_type": "no_license",
"max_line_length": 251,
"num_lines": 44,
"path": "/static/projects/playfield-long.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "A group of Machine learning & Algorithms projects. It consists of over 20 smaller ML projects in Keras, TensorFlow or scikit-learn, and a few Python implementations of well-known algorithms and data structures. Projects are here grouped by tools used.\n\n\n## Algorithms and data structures\nPython implementations of the following:\n\n- Binary Search Trees\n- Mergesort\n- Quicksort\n- HeapSort\n- PriorityQueue\n- LinkedList \n\n## Keras\n\nSmall projects in the form of Jupyter notebooks for:\n\n- Sentiment classification with feed-forward neural networks;\n- Multi-class sentiment classification with feed-forward neural nets;\n- Sentiment classification with learned & pretrained word embeddings;\n- Regression with neural networks and K-fold cross-validation on house price prediction;\n- Image classification with ConvNets and augmentation;\n- Image classification with pretrained ConvNets;\n- Visualising intermediate activations in ConvNets;\n- Visualising ConvNet filters;\n- Visualising ConvNet class activations;\n- Sentiment classification with recurrent neural nets (RNN & LSTM);\n- Regression for temperature forecasting with feed-forward, recurrent and bi-directional neural nets;\n\n\n## scikit-learn\n\nA few short experiments in:\n\n- Linear regression with different polynomial degrees;\n- Naive Bayes & Support Vector Machine classification, Kmeans clustering;\n- Data preprocessing (one-hot encoding, label encoding, etc.);\n\n## TensorFlow\n\nShort experiments in:\n\n- Linear regression;\n- Building a neural network;\n\n"
},
{
"alpha_fraction": 0.7876923084259033,
"alphanum_fraction": 0.7938461303710938,
"avg_line_length": 324,
"blob_id": "16b1b22292e3d2bc21f8eca0e9f0df759777ceb3",
"content_id": "15d1a5051501bfbd23f125e224e6d4e90db556ab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 325,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 1,
"path": "/app/static/projects/playfield-short.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "A group of Machine learning & Algorithms projects I occasionally do for learning and fun. <a target=\"_blank\" href=\"https://github.com/mappls/Playfield\">The project</a> consists of over 20 smaller ML projects in Keras, TensorFlow or scikit-learn, and a few Python implementations of well-known algorithms and data structures. "
},
{
"alpha_fraction": 0.8299319744110107,
"alphanum_fraction": 0.8299319744110107,
"avg_line_length": 146,
"blob_id": "198f91ebc47cfce02b39c7206ea94770ed685875",
"content_id": "bad9ae8282ddd5569f41d79bb693722f3db9418d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 147,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 1,
"path": "/app/static/projects/nlp-chunks-short.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "An NLP project on chunking phrases and name entity recognition in text inputs. Optimised to reach maximum performance on a small training dataset. "
},
{
"alpha_fraction": 0.590461790561676,
"alphanum_fraction": 0.591975748538971,
"avg_line_length": 27.09929084777832,
"blob_id": "4b75cdfb8eb2b90bf8767100de671b30d24e7da0",
"content_id": "bf92c32f0654603a4f0bdf2e41e1e4afffe8b6d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3963,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 141,
"path": "/app/index.py",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "\nfrom flask import Flask, render_template, Markup\nfrom flask_frozen import Freezer\nfrom markdown import markdown\nimport shutil\nimport json\nimport sys\nimport os\n\napp = Flask(__name__)\napp.config['FREEZER_DESTINATION'] = '../build'\nfreezer = Freezer(app)\n\n\ndef get_projects_json(filepath=\"static/projects/projects.json\"):\n with open(filepath, \"r\") as file:\n pl = json.load(file)\n return pl\n\n\ndef get_project_descriptions(title):\n\n # Get short and long description files\n files = os.listdir(\"static/projects/\")\n mds_short = [file for file in files if file[-9:] == \"-short.md\"]\n mds_long = [file for file in files if file[-8:] == \"-long.md\"]\n\n short = None\n long = None\n\n for filename in mds_short:\n if title in filename:\n with open(\"static/projects/%s\" % filename) as file:\n short = file.read()\n\n for filename in mds_long:\n if title in filename:\n with open(\"static/projects/%s\" % filename) as file:\n long = file.read()\n\n return long, short\n\n\[email protected]('/')\ndef get_index():\n projects = []\n pl = get_projects_json()\n for p in pl:\n ldesc, sdesc = get_project_descriptions(p['title'])\n p['sdesc'] = Markup(markdown(sdesc))\n p['ldesc'] = Markup(markdown(ldesc))\n projects.append(p)\n return render_template('projects.html', projects=pl)\n\n\[email protected]('/projects', methods=['GET', 'POST'])\ndef get_projects():\n projects = []\n pl = get_projects_json()\n for p in pl:\n ldesc, sdesc = get_project_descriptions(p['title'])\n p['sdesc'] = Markup(markdown(sdesc))\n p['ldesc'] = Markup(markdown(ldesc))\n projects.append(p)\n\n return render_template('projects.html', projects=pl)\n\n\[email protected]('/research', methods=['GET', 'POST'])\ndef get_research():\n with open('static/research/research.md') as file:\n research = file.read()\n research = Markup(markdown(research))\n return render_template('research.html', research=research)\n\n\[email protected]('/bio', methods=['GET', 'POST'])\ndef get_bio():\n return 
render_template('bio.html')\n\n\[email protected]('/project/<string:title>', methods=['GET', 'POST'])\ndef get_project(title):\n project = {}\n pl = get_projects_json()\n for p in pl:\n if p['title'] == title:\n project = p\n break\n\n print('** title', title)\n ldesc, sdesc = get_project_descriptions(title)\n project['sdesc'] = Markup(markdown(sdesc))\n project['ldesc'] = Markup(markdown(ldesc))\n return render_template('project.html', project=project)\n\n\[email protected]_generator\ndef projects_generator():\n pl = get_projects_json()\n for p in pl:\n yield \"/project/%s\" % p['title']\n\n\ndef copytree(src, dst, symlinks=False):\n\n # Remove old files\n for item in os.listdir(src):\n if item in os.listdir(dst):\n try:\n os.remove(dst + \"/\" + item)\n except PermissionError:\n shutil.rmtree(os.path.join(dst, item))\n\n # Copy all files\n for item in os.listdir(src):\n s = os.path.join(src, item)\n d = os.path.join(dst, item)\n if os.path.isdir(s):\n shutil.move(s, d, symlinks)\n else:\n shutil.copy2(s, d)\n\n for file in ['../projects', '../research', '../bio', '../project/chatbot-sam', '../project/dl-group',\n '../project/learnbet', '../project/nlp-chunks', '../project/playfield', '../project/trenddays',\n '../project/trendmatch recommender']:\n if os.path.isfile(file):\n os.rename(file, file + '.html')\n\n shutil.rmtree(src)\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 2:\n sys.exit(\"Use with 'run' or 'build' argument,\")\n if sys.argv[1] == 'run':\n app.run(debug=True)\n elif sys.argv[1] == 'build':\n freezer.freeze()\n\n # Copy all files from build to root folder\n copytree(src=app.config['FREEZER_DESTINATION'], dst=\"..\")\n"
},
{
"alpha_fraction": 0.78837651014328,
"alphanum_fraction": 0.7909033298492432,
"avg_line_length": 104.53333282470703,
"blob_id": "c6c2fc97753d6b3f3bad83520870e34fcb516a29",
"content_id": "a2129980a771424702fc916a1a3dbc5bbd12b41f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1583,
"license_type": "no_license",
"max_line_length": 460,
"num_lines": 15,
"path": "/app/static/projects/trendmatch recommender-long.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "Trendmatch is an innovative Danish <a target=\"_blank\" href=\"https://trendmatch.dk/\">startup company</a> in the fashion business. In one app it connects buyers and sellers - both large and small. Their shopping experience is as easy as Swipe -> Match -> Purchase.\n\nProduct recommendation is one of the most important features of the app, with an ultimate goal of increasing the number of matches between users and products. This in turn would result in higher profits for the sellers and better clothes for the buyers.\n\n<img class=\"intext-img\" src=\"../static/trendmatch_app.png\">\n\n## Design & challenges\n\nThe recommender's design started around the same time as the app's backend. This made sure that the algorithm will get the needed data, and the rest of the data infrastructure will be able to provide it. This was made possible only by great collaboration between the data, backend and leadership teams of Trendmatch.\n\nThe algorithm should be able to function in real-time i.e., in sub-second time intervals, between two user swipes. To be able to get accurate recommendations, we must combine product data with information on both short and long-term user actions. Some of the challenges here arise with incomplete user data, such as unknown user gender or age. Other challenges are connected to the way data is stored and computed, so sub-second predictions are made possible. \n\n## Development\n\nAs of March 2019, the Trendmatch app has been functional for a few months now, collecting a substantial amount of data. Thus, the development process of the recommender is under way! "
},
{
"alpha_fraction": 0.8141797780990601,
"alphanum_fraction": 0.8169601559638977,
"avg_line_length": 76.10713958740234,
"blob_id": "f0e7f37b28c0f751df0e2daef420d214039411c7",
"content_id": "03957d9b17e678dd2c71b8b8ee115d14d423d61e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2158,
"license_type": "no_license",
"max_line_length": 453,
"num_lines": 28,
"path": "/app/static/projects/dl-group-long.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "This is a group of 20 small projects in a few different areas in Deep Learning, as part of the Specialization Programme <a target=\"_blank\" href=\"https://www.deeplearning.ai\">deeplearning.ai</a>. They include aspects from both Research and Applied deep learning domains.\n\nThe research domain included the implementation of the actual neural networks in Python like: the forward and back-propagation, loss functions and gradient descent optimization. These were coded for Feed-forward, Convolutional, and Recurrent neural nets. Then, a few important algorithms were implemented on top, such as: regularization (L2 and Dropout), initialisation, gradient checking, residual connections, batch normalization, YOLO, attention etc.\n\nIn the applied domain, the above networks were applied on problems like image classification, object detection, text generation, machine translation, sentiment classification, neural style transfer etc.\n\nThe full list of projects includes:\n\n- Implementing the Logistic regression algorithm;\n- Implementing a shallow neural network;\n- Implementing a deep neural network;\n- Image classification using the above deep neural net;\n- Experimenting with various neural net initialisation techniques;\n- Implementing L2 and Dropout regularization;\n- Implementing Gradient Checking in backpropagation;\n- Implementing Convolutional neural nets;\n- Building a ConvNet in TensorFlow for image classification;\n- Building a ConvNet in Keras for image classification;\n- Building a 50-layer ConvNet in Keras with Residual connections and BatchNorm for image classification;\n- Implementing the YOLO algorithm in ConvNets for object detection;\n- Implementing a Neural Style Transfer algorithm for generating artistic images in TensorFlow;\n- Face Verification and Recognition with ConvNets in Keras;\n- Implementing a Recurrent Neural Network (RNN and LSTM cells);\n- Name generation with the above implemented RNN cells;\n- Jazz music generation with LSTM 
networks in Keras;\n- Word embeddings for word analogies and word de-biasing;\n- Neural machine translation for dates translation using Attention algorithm and Keras;\n- Trigger word detection in Keras."
},
{
"alpha_fraction": 0.504792332649231,
"alphanum_fraction": 0.5111821293830872,
"avg_line_length": 19.733333587646484,
"blob_id": "8cf60defa2be247dbcc6b0d3d9bd8330c57bc357",
"content_id": "5fb8ad4ed82888cf6be3b59b283f66fba57f117e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 313,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 15,
"path": "/app/templates/project.html",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "{% extends 'base.html' %}\n\n{% block header %}\n<h1>{% block title %}Project: {{ project['title'] }}{% endblock %}</h1>\n{% endblock %}\n\n{% block content %}\n<article class=\"post\">\n <header>\n <div class=\"special\">\n {{ project['ldesc'] }}\n </div>\n </header>\n</article>\n{% endblock %}\n\n\n"
},
{
"alpha_fraction": 0.7658079862594604,
"alphanum_fraction": 0.7751756310462952,
"avg_line_length": 133.1999969482422,
"blob_id": "8d6315ee8df94d1940857f59f10853fc8cc8d9c3",
"content_id": "b322f6eb198539041d3fe681167ad7c46854b93a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4697,
"license_type": "no_license",
"max_line_length": 697,
"num_lines": 35,
"path": "/static/projects/chatbot-sam-long.md",
"repo_name": "mappls/mappls.github.io",
"src_encoding": "UTF-8",
"text": "Chatbot-Sam is a Python module for an intelligent conversational chatbot. Although never officially released, its draft version was built as part of the Upskill.ai startup idea. Upskill.ai in turn, was part of another small Australian company called Faststores, now rebranded to <a target=\"_blank\" href=\"https://blaze.online/blog/hello-blaze-online/\">Blaze Online</a>. Complicated yes.\n\n## The Upskill.ai project\n\nThe idea behind Upskill.ai was to build software that will enhance people's learning on a closed topic. The Minimum Viable Product version was planned to have a conversational chatbot with \"some\" knowledge in a given domain, connected to an augmented face giving a realistic delivery of chatbot's answers. A huge idea, and a heaven for Machine Learning engineers. \n\n<img class=\"intext-img\" src =\"../static/upskill-logo.png\" width=\"350px\" />\n\n## Intent recognition\n\nThe conversational, or question-answering domain is still a hot research topic in AI, and probably one still missing an enabler technology. Usually chatbot solutions are made up of multiple sub-systems, each capable of doing some part of the work. Same goes for Chatbot-Sam: depending on the intent of the user's question, the question was forwarded to the sub-module which is trained to give an answer in that domain. \n\nA crutial part here is our \"intent recongizer\" which is trained to estimate the user's intent, given his question. Once we built this part, we could break up the problem in many smaller parts and address each separately. An easy way to build intent recognition is with <a target=\"_blank\" href=\"https://wit.ai/\">Wit.ai</a>.\n\n## Open-domain QA - the Deep learning module\n\nAt the time of developing Chatbot-Sam, Deep learning seemed to be the best approach to build \"intelligent\" chatbots. 
Of course, we tried this approach using an encoder-decoder type of recurrent neural networks (RNNs) where a user's question is passed in the encoder, and the decoder uses the encoder's input and generates an answer. Here are a few useful resources to get started: <a target=\"_blank\" href=\"https://github.com/Conchylicultor/DeepQA)\">DeepQA</a>, <a target=\"_blank\" href=\"http://www.wildml.com/2016/04/deep-learning-for-chatbots-part-1-introduction/\">WildML</a>, <a target=\"_blank\" href=\"https://arxiv.org/abs/1409.3215\">Google's paper</a>.\n\nThe downside of this approach is the needed training set of question-answer pairs. This is especially a problem if we're building a chatbot in a closed domain - you'd need to spend lots of time to assemble a nice dataset in a given domain. In our case, we experimented with a few different movie subtitle datasets (<a target=\"_blank\" href=\"http://opus.nlpl.eu/OpenSubtitles-v2018.php\">OpenSubtitles</a>, <a target=\"_blank\" href=\"https://www.cs.cornell.edu/~cristian/Cornell_Movie-Dialogs_Corpus.html\">Cornell</a>, <a target=\"_blank\" href=\"https://arxiv.org/abs/1506.08909\">Ubuntu</a>), while more datasets can be found in this <a target=\"_blank\" href=\"https://arxiv.org/abs/1512.05742\">paper</a>. \n\nOur DL model trained on a movie dataset was only able to give satisfying answers in some everyday questions (eg. \"what color is the sky at night?\"). Therefore it was used as a \"default\" module, i.e. the intent recognizer will only forward a question to it, if no other module is capable of giving a good answer.\n\n## Closed-domain QA: The rule-based module\n\nSome questions are more or less easy to answer. For example \"what's your name?\" or \"where do you come from?\". When the intend recognizer detects such question, it forwards it to the rule-based module, where the answer is hardcoded. We could deliver more interesting answers by adding randomness using `numpy.random` in the sentence composition part. 
\n\n## Closed-domain QA: The Deep learning module (2)\n\nAfter some time experimenting in the open-domain DL module, we reached a nice solution of training a DL model in the closed-domain. The solution uses a multiple input single output model. The first input branch processes the user's question, while in the second one we feed a text snippet containing the answer to the question. Both branches can use Long-Short Term Memory (LSTM) cells. The network is trained in a supervised way, by providing the answers in the output. \n\n<img class=\"intext-img\" src =\"../static/sam-qa.png\" width=\"400px\"/>\n\n## Deployment in production\n\nOur team at Upskill.ai used the Google Cloud for both development and deployment. Chatbot-Sam was deployed in GCP in a small webapp internally for the Upskill.ai team. Unfortunately, all the building blocks of Upskill.ai (including text-to-speech, lip-syncing and face composition modules) were never deployed together as a single product.\n"
}
] | 18 |
Arnavbd1971/lc_report_generator_odoo8_aahold | https://github.com/Arnavbd1971/lc_report_generator_odoo8_aahold | e5057dab71755cec94aaa906f4c0b5a61b812b6d | 41750448bfe8cc9643dd781bcb26efda3178fb5e | 6119cf954d51c337afb185a867b5671f5d623b8e | refs/heads/master | 2021-09-11T19:07:55.291102 | 2018-04-11T06:59:41 | 2018-04-11T06:59:41 | 117,797,727 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7195571660995483,
"alphanum_fraction": 0.7269372940063477,
"avg_line_length": 32.875,
"blob_id": "adedb2bffcce7041f32806053811d7dfdf80938a",
"content_id": "0f5d9ab34ad5c278679a1b2a76254d44a80bcceb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 271,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 8,
"path": "/models/country_origin.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields\n\nclass LCinformations(models.Model):\n _name = 'country_origin.model'\n\n\n name = fields.Char(required=True, string='Country Of Origin Name',size=64)\n date = fields.Date('Created date', required=True, default=fields.Date.today())\n"
},
{
"alpha_fraction": 0.5980765223503113,
"alphanum_fraction": 0.6022173166275024,
"avg_line_length": 42.91202163696289,
"blob_id": "72ac6a05b58d99411a9ba742e4b118e549c3508c",
"content_id": "2bce8eacf47a539c507d56fbc250434ac9990f7f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14973,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 341,
"path": "/models/proforma_invoice.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\nfrom openerp import exceptions\n\nclass ProformaInvoiceModel(models.Model):\n _name = 'proforma_invoice.model'\n\n _rec_name = \"name\"\n\n name = fields.Char(string='P/I No.', size=100, readonly=True)\n proforma_invoice_created_date = fields.Date(string='Created Date', readonly=True,default=fields.Date.today(), required=True)\n validity_date = fields.Date(string='Validity Date', default=fields.Date.today(), required=True)\n\n account_invoice_id = fields.Many2one('account.invoice',string='Customer Invoice No.', required=True)\n\n customer_name = fields.Char(string='Customer Name',required=True)\n customer_full_address = fields.Text(string='Customer Address',required=True) \n\n ordered_products_name = fields.Text(string='ordered_products_name') \n ordered_products_price_of_unit = fields.Text(string='ordered_products_price_of_unit') \n ordered_products_quantity = fields.Text(string='ordered_products_quantity')\n ordered_products_amount = fields.Text(string='ordered_products_amount')\n\n ordered_products_total_quantity = fields.Char(string='ordered_products_total_quantity') \n ordered_products_total_amount = fields.Char(string='Total')\n ordered_products_total_amount_in_word = fields.Char(string='ordered_products_total_amount_in_word')\n\n currency_symbol_name = fields.Char(string='currency_symbol_name')\n currency_symbol_name1 = fields.Char(string='currency_symbol_name')\n currency_symbol_name2 = fields.Char(string='currency_symbol_name')\n currency_symbol = fields.Char(string='')\n currency_symbol1 = fields.Char(string='')\n currency_symbol2 = fields.Char(string='')\n\n beneficiary_full_name = fields.Many2one('beneficiary_full_name.model',string='Beneficiary Full Name',required=True) \n # beneficiary_full_name = fields.One2many('behalf_of',string='Beneficiary Full Name',copy=True) \n erc_no = fields.Char(string='ERC NO.',required=True)\n method_of_payment = fields.Many2one('method_of_payment.model',string='Method 
of Payment',required=True)\n reimbursement = fields.Many2one('reimbursement.model',string='Reimbursement',required=True)\n\n beneficiary_bank_name = fields.Many2one('bank_names.model',string='Beneficiary Bank Name',required=True)\n beneficiary_bank_branch = fields.Many2one('bank_branch.model',string='Beneficiary Bank Branch',required=True)\n beneficiary_bank_address = fields.Text(string='Beneficiary Bank Address',required=True)\n swift_code = fields.Char(string='Swift Code',required=True)\n\n product_type = fields.Many2one('product_type.model',string='Type',required=True)\n bin_no = fields.Char(string='BIN',required=True)\n country_of_origin = fields.Many2one('country_origin.model',string='Country Of Origin', required=True)\n terms_of_delivery = fields.Many2one('terms_of_delivery.model',string='Terms of Delivery',required=True) \n time_of_delivery = fields.Char(string='Time of Delivery',required=True)\n\n place_of_delivery_name = fields.Many2one('supplier_factory_name_address.model',string='Delivery Factory Name', required=True)\n place_of_delivery_addr = fields.Text(string='Delivery Factory Address',required=True)\n\n bags_of_packing = fields.Char(string='Packing',required=True) \n\n other_terms_and_condition = fields.Many2one('terms_conditions.model',string='Other Terms & Condition',required=True)\n behalf_of = fields.Char(string='On behalf of',required=True)\n\n\n\n #This fuction is for create a uniq number for a invoice report.\n @api.model\n def create(self, vals):\n \"\"\"\n Overrides orm create method.\n @param self: The object pointer\n @param vals: dictionary of fields value.\n \"\"\"\n if not vals:\n vals = {}\n seq_obj = self.env['ir.sequence']\n invoice_num = seq_obj.next_by_code('proforma_invoice.model') or 'New'\n vals['name'] = invoice_num\n return super(ProformaInvoiceModel, self).create(vals)\n\n # @api.onchange('validity_date') \n # def date_validation(self):\n # if self.validity_date < fields.Date.today():\n # raise 
exceptions.ValidationError(_(\"Validity date is can't less then current date.\")) \n\n\n\n # @api.onchange('beneficiary_full_name') \n # def beneficiary_full_name(self):\n # res = {}\n\n # beneficiary_full_names = form.getvalue(self.beneficiary_full_name)\n\n # if len(beneficiary_full_names) != 0 :\n # res = {'value':{'behalf_of':beneficiary_full_names,}}\n # else:\n # res = {}\n # return res \n\n\n\n # This function is for load data automatically in the existing field from another table \n def onchange_account_invoice_id(self, cr, uid, ids, account_invoice_id=False, context=None):\n res= {}\n if account_invoice_id:\n\n service_obj= self.pool.get('account.invoice').browse(cr, uid,account_invoice_id,context=context)\n service_obj2= self.pool.get('res.partner').browse(cr, uid,service_obj.partner_id.id,context=context)\n service_obj3= self.pool.get('res.country').browse(cr, uid,service_obj2.country_id.id,context=context)\n currency_symbol= self.pool.get('res.currency').browse(cr, uid,service_obj.currency_id.id,context=context)\n \n cus_full_address = str(service_obj2.street) + \" , \" + str(service_obj2.street2) + \" , \" + str(service_obj2.city)+ \" - \" + str(service_obj2.zip) + \" , \" + str(service_obj3.name)\n\n\n\n invoice_line_pool_ids = self.pool.get('account.invoice.line').search(cr, uid,[('invoice_id','=',account_invoice_id),],context=context)\n\n invoice_lines_product_name = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['name'], context=context)\n\n invoice_lines_product_quantity = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['quantity'], context=context)\n\n invoice_lines_product_price_of_unit = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['price_unit'], context=context)\n\n invoice_lines_product_amount = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['price_subtotal'], context=context)\n\n \n\n\n ordered_products_names = 
self.split_products_names(invoice_lines_product_name) \n\n # ordered_products_number_of_bags = self.split_products_number_of_bags(invoice_lines_product_quantity)\n\n ordered_products_quantity = self.split_products_quantity(invoice_lines_product_quantity)\n\n ordered_products_price_of_unit = self.split_products_price_of_unit(invoice_lines_product_price_of_unit)\n\n ordered_products_amount = self.split_products_amount(invoice_lines_product_amount)\n\n ordered_products_total_quantity = self.products_total_quantity(invoice_lines_product_quantity)\n\n ordered_products_total_amount = self.products_total_amount(invoice_lines_product_amount)\n\n ordered_products_total_amount_in_word = self.numToWords(ordered_products_total_amount)\n\n\n\n\n\n\n res = {'value':{'account_invoice_id2':service_obj.number,'invoice_created_date':service_obj.date_invoice,\n 'customer_name':service_obj2.name, \n 'customer_full_address':cus_full_address,\n 'ordered_products_name':ordered_products_names,\n 'ordered_products_price_of_unit':ordered_products_price_of_unit,\n 'ordered_products_quantity':ordered_products_quantity,\n 'ordered_products_amount':ordered_products_amount,\n 'ordered_products_total_quantity': \"{:,}\".format( ordered_products_total_quantity ), \n 'ordered_products_total_amount': \"{:,}\".format( ordered_products_total_amount ),\n 'ordered_products_total_amount_in_word':ordered_products_total_amount_in_word,\n 'currency_symbol_name':currency_symbol.name,\n 'currency_symbol_name1':currency_symbol.name,\n 'currency_symbol_name2':currency_symbol.name,\n 'currency_symbol':currency_symbol.symbol,\n 'currency_symbol1':currency_symbol.symbol, \n 'currency_symbol2':currency_symbol.symbol\n }}\n \n\n else:\n res={} \n return res\n def split_products_names(self,invoice_lines_product_name):\n names= []\n idx = 0\n for r in invoice_lines_product_name:\n names.append(r['name'])\n combine = '\\n \\n'.join([str(i) for i in names]) \n return combine \n\n def 
split_products_quantity(self,invoice_lines_product_quantity):\n quantity= []\n idx = 0\n for r in invoice_lines_product_quantity:\n quantity.append( \"{:,}\".format( int(r['quantity']) ))\n combine = '\\n \\n \\n'.join([str(i) for i in quantity])\n return combine\n\n def split_products_price_of_unit(self,invoice_lines_product_price_of_unit):\n price_of_unit= []\n idx = 0\n for r in invoice_lines_product_price_of_unit:\n price_of_unit.append( \"{:,}\".format( r['price_unit']) )\n combine = '\\n \\n \\n'.join([str(i) for i in price_of_unit])\n return combine\n\n def split_products_amount(self,invoice_lines_product_amount):\n amount= []\n idx = 0\n for r in invoice_lines_product_amount:\n amount.append( \"{:,}\".format( r['price_subtotal']) )\n combine = '\\n \\n \\n'.join([str(i) for i in amount])\n return combine \n def products_total_quantity(self,invoice_lines_product_quantity):\n total_quantity= []\n idx = 0\n for r in invoice_lines_product_quantity:\n total_quantity.append(r['quantity'])\n in_com = sum(total_quantity)\n combine = int(in_com)\n return combine \n\n def products_total_amount(self,invoice_lines_product_amount):\n total_amount= []\n idx = 0\n for r in invoice_lines_product_amount:\n total_amount.append(r['price_subtotal'])\n combine = sum(total_amount)\n return combine\n\n def onchange_factory_name(self, cr, uid, ids, place_of_delivery_name=False, context=None):\n res= {}\n if place_of_delivery_name:\n service_obj= self.pool.get('supplier_factory_name_address.model')\n rec = service_obj.browse(cr, uid, place_of_delivery_name)\n res = {'value':{\n 'place_of_delivery_addr':rec.address,\n }}\n else:\n res={} \n return res\n\n # @api.onchange('beneficiary_full_name') \n # def beneficiary_full_name(self, cr, uid, ids, beneficiary_full_name=False, context=None):\n # res = {}\n\n # if beneficiary_full_name:\n # res = {'value':{\n # 'behalf_of':beneficiary_full_name,\n # }}\n # else:\n # res={} \n \n # return res\n\n\n # 
@api.onchange('bank_name','bank_branch') \n def onchange_bank_name_branch(self, cr, uid, ids, beneficiary_bank_name,beneficiary_bank_branch, context=None):\n bank_name_id = beneficiary_bank_name\n bank_branch_id = beneficiary_bank_branch\n if bank_name_id and bank_branch_id :\n service_obj= self.pool.get('bank_names_branch_address.model').search(cr, uid,[('name','=',bank_name_id),('branch','=',bank_branch_id),],context=context)\n bank_address_in_list = self.pool.get('bank_names_branch_address.model').read(cr, uid,service_obj,['address'], context=context)\n if len(bank_address_in_list) != 0:\n bank_address = self.split_bank_address(bank_address_in_list)\n res = {\n 'value': {\n 'beneficiary_bank_address': bank_address\n }\n }\n else :\n res = {\n 'value': {\n 'beneficiary_bank_address': ''\n }\n }\n else:\n res = {}\n return res\n\n \n def split_bank_address(self,bank_address_in_list):\n address= []\n idx = 0\n for r in bank_address_in_list:\n address.append(r['address']) \n combine = '\\n \\n \\n'.join([str(i) for i in address])\n return combine\n\n\n @api.multi\n def amount_to_text(self, amount_total):\n return amount_to_text(amount_total)\n\n def onchange_beneficiary_full_name(self, cr, uid, ids, beneficiary_full_name, context=None):\n beneficiary_full_name_id = beneficiary_full_name\n\n if beneficiary_full_name_id : \n service_obj= self.pool.get('beneficiary_full_name.model').browse(cr, uid,beneficiary_full_name_id,context=context)\n name = service_obj.name\n erc_no = service_obj.erc_no\n\n res = {\n 'value': { \n 'erc_no': erc_no,\n 'behalf_of': name\n }\n }\n else : \n res = {\n 'value': {\n 'erc_no': '',\n 'behalf_of': ''\n }\n } \n\n return res \n\n\n \n def numToWords(self,num,join=True):\n '''words = {} convert an integer number into words'''\n units = ['','one','two','three','four','five','six','seven','eight','nine']\n teens = ['','eleven','twelve','thirteen','fourteen','fifteen','sixteen', \\\n 'seventeen','eighteen','nineteen']\n tens = 
['','ten','twenty','thirty','forty','fifty','sixty','seventy', \\\n 'eighty','ninety']\n thousands = ['','thousand','million','billion','trillion','quadrillion', \\\n 'quintillion','sextillion','septillion','octillion', \\\n 'nonillion','decillion','undecillion','duodecillion', \\\n 'tredecillion','quattuordecillion','sexdecillion', \\\n 'septendecillion','octodecillion','novemdecillion', \\\n 'vigintillion']\n words = []\n if num==0: words.append('zero')\n else:\n numStr = '%d'%num\n numStrLen = len(numStr)\n groups = (numStrLen+2)/3\n numStr = numStr.zfill(groups*3)\n for i in range(0,groups*3,3):\n h,t,u = int(numStr[i]),int(numStr[i+1]),int(numStr[i+2])\n g = groups-(i/3+1)\n if h>=1:\n words.append(units[h])\n words.append('hundred')\n if t>1:\n words.append(tens[t])\n if u>=1: words.append(units[u])\n elif t==1:\n if u>=1: words.append(teens[u])\n else: words.append(tens[t])\n else:\n if u>=1: words.append(units[u])\n if (g>=1) and ((h+t+u)>0): words.append(thousands[g]+',')\n if join: return ' '.join(words)\n return words"
},
{
"alpha_fraction": 0.6885043382644653,
"alphanum_fraction": 0.6897404193878174,
"avg_line_length": 46.235294342041016,
"blob_id": "e3eaf447457ae455440063c95f835b0b48d2b4e4",
"content_id": "e1e46464b4861a46855f58dd3a5c655abecafb71",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 809,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 17,
"path": "/models/bank_names_branch_address.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api\n\nclass bank_branch_address(models.Model):\n _name = 'bank_names_branch_address.model'\n\n\n name = fields.Char(compute='concatenate_custom_fields',store=True,string='Name')\n # bank_name = fields.Many2one('bank_names.model',required=True, string='Bank Name')\n bank_name = fields.Char(required=True, string='Bank Name')\n bank_branch = fields.Char(required=True, string='Bank Branch')\n bank_address = fields.Text(required=True, string='Bank Address')\n created_date = fields.Date('Created Dated', required=True, default=fields.Date.today())\n s_code = fields.Char(string='Swift Code')\n\n @api.depends('bank_name','bank_branch')\n def concatenate_custom_fields(self):\n self.name = str(self.bank_name) + ', ' + str(self.bank_branch) + ' '\n \n\n"
},
{
"alpha_fraction": 0.5020898580551147,
"alphanum_fraction": 0.5062695741653442,
"avg_line_length": 26.7391300201416,
"blob_id": "b415270823abfaf2b5582e8afc6d1f49d81d4d3e",
"content_id": "34f901e4328cb72b2e05fcb026e573f8d8665fae",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1914,
"license_type": "no_license",
"max_line_length": 189,
"num_lines": 69,
"path": "/models/summery_reports/proforma_invoice_status.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\nfrom openerp import exceptions\n\nclass ProformaInvoiceStatus(models.Model):\n _name = 'proforma_invoice_status.model'\n\n name = fields.Char(string='Report No.')\n\n created_date = fields.Date(string='Created Date', default=fields.Date.today())\n \n from_date = fields.Date(string='From Date')\n to_date = fields.Date(string='To Date')\n\n\n test = fields.Text(string='Test')\n\n\n def onchange_from_date(self, cr, uid, ids, from_date=False, context=None):\n\n res= {}\n ffrom_date = from_date\n if ffrom_date:\n res = {'value':{\n 'from_date':ffrom_date,\n }}\n else:\n res = {'value':{\n 'from_date':'',\n }} \n return res\n\n def onchange_to_date(self, cr, uid, ids, from_date=False, to_date=False, context=None):\n # def onchange_to_date(self, cr, uid, ids, to_date=False):\n\n res= {}\n ffrom_date = from_date\n tto_date = to_date \n if tto_date:\n\n # cr = self.env.cr\n cr.execute(\"SELECT id, name, proforma_invoice_created_date FROM proforma_invoice_model WHERE proforma_invoice_created_date BETWEEN %s AND %s \", (str(ffrom_date),str(tto_date)) )\n datas = cr.fetchall()\n\n res = {'value':{\n 'test':datas,\n }}\n else:\n res = {'value':{\n 'test':'',\n }} \n return res \n\n\n\n # def get_selection(self, cr, uid, context):\n # res= {}\n # cr = self.env.cr\n # cr.execute(\"SELECT id,name FROM proforma_invoice_model WHERE proforma_invoice_created_date='2017-12-28' \")\n\n # datas = cr.fetchall()\n\n # if datas:\n # res = {'value':{\n # 'test':datas,\n # }}\n # else:\n # res= {} \n\n # return "
},
{
"alpha_fraction": 0.6077957153320312,
"alphanum_fraction": 0.6094399690628052,
"avg_line_length": 52.273197174072266,
"blob_id": "093306f7e08dd208d0a2a66075c88cc29f29ad52",
"content_id": "62606e0a3b6915c509923b18599edfc36cfb2ade",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10339,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 194,
"path": "/models/truck_challan.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\n\nclass TruckChallanModel(models.Model):\n _name = 'truck_challan.model'\n name = fields.Char(string='Truck Receipt No.', required=True)\n commercial_invoice_id = fields.Many2one('commercial_invoice.model',string='Commercial Invoice No.', required=True)\n truck_challan_created_date = fields.Date(string='Created Date',default=fields.Date.today(), required=True)\n supplier_name = fields.Char(string='Supplier Name', required=True)\n supplier_address = fields.Text(string='Supplier Address', required=True)\n bank_name = fields.Char(string='Bank Name', required=True)\n bank_brunch = fields.Char(string='Bank Brunch', required=True)\n bank_addr = fields.Text(string='Bank Address', required=True)\n customer_name = fields.Char(string='Buyer Name', required=True)\n customer_full_address = fields.Text(string='Buyer Address', required=True)\n ordered_products_name = fields.Text(string='ordered_products_name') \n ordered_products_number_of_bags = fields.Text(string='ordered_products_number_of_bags') \n ordered_products_quantity = fields.Text(string='ordered_products_quantity')\n truck_no = fields.Char(string='Truck No.', required=True)\n total_bags = fields.Char(string='Total Bags')\n total_bags2 = fields.Char(string='Total Bags', required=True)\n ordered_products_total_quantity = fields.Char(string='ordered_products_total_quantity')\n total_gross_weight = fields.Char(string='Gross Weight', required=True)\n proforma_invoice_uniq_id = fields.Char(string='proforma_invoice_uniq_id', required=True)\n proforma_invoice_created_date = fields.Char(string='proforma_invoice_created_date', required=True)\n lc_num = fields.Char(string='L/C No.', required=True)\n lc_date = fields.Date(string='L/C Dated', required=True)\n contact_no = fields.Char(string='contact no', required=True)\n freight = fields.Char(string='Freight', required=True)\n\n def onchange_commercial_invoice_id(self, cr, uid, ids, commercial_invoice_id=False, context=None):\n 
res= {}\n if commercial_invoice_id:\n all_data_of_commercial_invoice = self.pool.get('commercial_invoice.model').browse(cr, uid, commercial_invoice_id,context=context)\n commercial_invoice_no = all_data_of_commercial_invoice.name\n proforma_invoice_id = all_data_of_commercial_invoice.pi_id\n proforma_invoice_uniq_id = all_data_of_commercial_invoice.proforma_invoice_id\n proforma_invoice_created_date= all_data_of_commercial_invoice.proforma_invoice_created_date\n seq_num = all_data_of_commercial_invoice.only_seq_num\n contact_no= all_data_of_commercial_invoice.contact_no\n num_of_bags = all_data_of_commercial_invoice.num_of_bags\n supplier_factory_name = all_data_of_commercial_invoice.supplier_factory_name\n supplier_factory_address= all_data_of_commercial_invoice.supplier_factory_address\n service_obj= self.pool.get('sale.order').browse(cr, uid,proforma_invoice_id.id,context=context)\n lc_id = service_obj.lc_num_id\n # lc_info_id= all_data_of_commercial_invoice.lc_num\n lc_info_pool_ids = self.pool.get('lc_informations.model').browse(cr, uid,lc_id.id,context=context)\n lc_num = lc_info_pool_ids.name\n lc_date = lc_info_pool_ids.created_date\n bank_name = lc_info_pool_ids.bank_name2\n bank_branch = lc_info_pool_ids.bank_branch\n bank_addr = lc_info_pool_ids.bank_address\n service_obj2= self.pool.get('res.partner').browse(cr, uid,service_obj.partner_id.id,context=context)\n service_obj3= self.pool.get('res.country').browse(cr, uid,service_obj2.country_id.id,context=context)\n currency_symbol= self.pool.get('res.currency').browse(cr, uid,service_obj.currency_id.id,context=context)\n cus_name = service_obj2.name\n cus_full_address = str(service_obj2.street) + \" , \" + str(service_obj2.street2) + \" , \" + str(service_obj2.city)+ \" - \" + str(service_obj2.zip) + \" , \" + str(service_obj3.name)\n account_invoice_ids = self.pool.get('account.invoice').search(cr, uid,[('pi_no','=',service_obj.name),('process','=','set_for_LC')],context=context)\n if not 
account_invoice_ids:\n # print('Account invoice list is empty.')\n raise Warning(_('Account invoice list is empty.'))\n else:\n invoice_line_pool_ids = self.pool.get('account.invoice.line').search(cr, uid,[('invoice_id','=',account_invoice_ids),],context=context)\n invoice_lines_product_name = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['name'], context=context)\n invoice_lines_product_quantity = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['quantity','name'], context=context)\n\n ordered_products_names = self.split_products_names(invoice_lines_product_name) \n ordered_products_number_of_bags = self.split_products_number_of_bags(invoice_lines_product_quantity,num_of_bags)\n ordered_products_quantity = self.split_products_quantity(invoice_lines_product_quantity)\n total_bags = self.total_bags_in_quantity(invoice_lines_product_quantity,num_of_bags)\n ordered_products_total_quantity = self.products_total_quantity(invoice_lines_product_quantity)\n total_gross_weight = self.calculation_of_total_gross_weight(invoice_lines_product_quantity)\n res = {'value':{\n 'name' : seq_num,\n 'bank_name':bank_name,\n 'bank_brunch':bank_branch, \n 'bank_addr':bank_addr,\n 'customer_name':cus_name, \n 'customer_full_address':cus_full_address,\n 'ordered_products_name':ordered_products_names,\n 'ordered_products_number_of_bags':ordered_products_number_of_bags, \n 'ordered_products_quantity':ordered_products_quantity, \n 'total_bags':\"{:,}\".format( total_bags ),\n 'total_bags2':\"{:,}\".format( total_bags ),\n 'ordered_products_total_quantity':\"{:,}\".format( ordered_products_total_quantity ),\n 'total_gross_weight':total_gross_weight,\n 'proforma_invoice_uniq_id':proforma_invoice_uniq_id,\n 'proforma_invoice_created_date':proforma_invoice_created_date,\n 'lc_num':lc_num,\n 'lc_date':lc_date,\n 'contact_no':contact_no, \n 'supplier_name':supplier_factory_name, \n 'supplier_address':supplier_factory_address,\n }} \n else:\n 
res={} \n return res \n\n def split_products_names(self,invoice_lines_product_name):\n seen = set()\n answer = []\n names= []\n for r in invoice_lines_product_name:\n names.append(r['name'])\n combine_names = '\\n'.join([str(i) for i in names])\n for line in combine_names.splitlines():\n if line not in seen:\n seen.add(line)\n answer.append(line)\n combine = '\\n'.join(answer)\n return combine\n\n def split_products_number_of_bags(self,invoice_lines_product_quantity,num_of_bags):\n number_of_bags= []\n bags = int(num_of_bags)\n testListDict = {}\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity'] / bags)\n except:\n d=item['name']\n testListDict[d] = int(item['quantity'] / bags)\n \n for the_key, the_value in testListDict.iteritems():\n number_of_bags.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in number_of_bags])\n return combine\n\n def total_bags_in_quantity(self,invoice_lines_product_quantity,num_of_bags):\n number_of_bags= []\n testListDict = {}\n bags = int(num_of_bags)\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n for the_key, the_value in testListDict.iteritems():\n number_of_bags.append(int(the_value / bags)) \n total = sum(number_of_bags)\n return total \n\n \n\n def split_products_quantity(self,invoice_lines_product_quantity):\n quantity= []\n testListDict = {}\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n\n for the_key, the_value in testListDict.iteritems():\n quantity.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in quantity])\n return combine\n\n # def split_expected_delivery_date(self,expected_delivery_date):\n # date= []\n # for r in expected_delivery_date:\n # 
date.append(r['expected_delivery_date'])\n # combine = '\\n'.join([str(i) for i in date])\n # return combine\n\n def products_total_quantity(self,invoice_lines_product_quantity):\n total_quantity= []\n for r in invoice_lines_product_quantity: \n total_quantity.append(r['quantity'])\n in_com = sum(total_quantity)\n combine = int(in_com)\n return combine \n\n def calculation_of_total_gross_weight(self,invoice_lines_product_quantity):\n squantity = []\n testListDict = {}\n into = 1.04\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n for the_key, the_value in testListDict.iteritems():\n squantity.append(the_value)\n result = [ x * into for x in squantity]\n gross = int(sum(result)) \n\n return gross \n"
},
{
"alpha_fraction": 0.5166112780570984,
"alphanum_fraction": 0.5166112780570984,
"avg_line_length": 37.64516067504883,
"blob_id": "c3e45fd1f8dca15828afc664f48c3f1db4387ffc",
"content_id": "281b9cf84fa07299a81c5b547a7d2c701fa1aa69",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1204,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 31,
"path": "/models/customer_invoices2.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\n\n\nclass CustomerInvoiceModel(models.Model):\n\n _inherit = 'account.invoice'\n\n pi_no = fields.Char(string='P/I No')\n\n # def create(self, cr, uid, vals, context=None):\n # if vals.get('origin'):\n # delivery_challan_no = vals.get('origin')\n # if delivery_challan_no :\n # stock_picking_ids = self.pool.get('stock.picking').search(cr, uid,[('name','=',delivery_challan_no),],context=context)\n # pi_num_list = self.pool.get('stock.picking').read(cr, uid,stock_picking_ids,['origin'], context=context)\n # if not pi_num_list:\n # pi_no = ''\n # vals['pi_no'] = pi_no\n # else: \n # pi_no = self.split_from_list(pi_num_list)\n # vals['pi_no'] = pi_no \n\n # new_id = super(CustomerInvoiceModel, self).create(cr, uid, vals, context=context) \n # return new_id\n\n # def split_from_list(self,list_name):\n # save = []\n # for r in list_name:\n # save.append(r['origin'])\n # combine = '\\n'.join([str(i) for i in save])\n # return combine \n"
},
{
"alpha_fraction": 0.5224336981773376,
"alphanum_fraction": 0.5224336981773376,
"avg_line_length": 35.787498474121094,
"blob_id": "337ae19dc261efbdeb5369c310d70699c3c6e8fa",
"content_id": "12184dc21d86bd360bba601338863878c8fa48f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2942,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 80,
"path": "/models/customer_invoices.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\n\n\n# Customer Invoice Model start\n\nclass CustomerInvoiceModel(models.Model):\n \n # _name = 'delivery.challan'\n _inherit = 'account.invoice'\n \n\n pi_no = fields.Char(string='P/I No')\n do_no = fields.Char(string='DO No')\n process = fields.Selection([('set_for_LC', 'set_for_LC'),('pandding', 'pandding')],'Process', default='pandding')\n process_status = fields.Char(string='Process', default='pandding')\n\n\n def create(self, cr, uid, vals, context=None):\n if vals.get('origin'):\n delivery_challan_no = vals.get('origin')\n if delivery_challan_no :\n stock_picking_ids = self.pool.get('stock.picking').search(cr, uid,[('name','=',delivery_challan_no),],context=context)\n pi_num_list = self.pool.get('stock.picking').read(cr, uid,stock_picking_ids,['origin','do_no'], context=context)\n if not pi_num_list:\n pi_no = ''\n vals['pi_no'] = pi_no\n else: \n pi_no = self.split_from_list(pi_num_list,'origin')\n vals['pi_no'] = pi_no \n\n do_no = self.split_from_list(pi_num_list,'do_no')\n vals['do_no'] = do_no \n\n new_id = super(CustomerInvoiceModel, self).create(cr, uid, vals, context=context) \n return new_id\n\n def split_from_list(self,list_name,data_field):\n save = []\n for r in list_name:\n save.append(r[data_field])\n combine = '\\n'.join([str(i) for i in save])\n return combine \n\n def onchange_process(self, cr, uid, ids, process=False, context=None): \n res= {}\n if process:\n res = {'value':{\n 'process_status':process,\n }}\n else:\n res={} \n return res \n\n\n# Customer Invoice Model end\n\n# def create(self, cr, uid, vals, context=None):\n# if vals.get('origin'):\n# delivery_challan_no = vals.get('origin')\n\n# stock_picking_ids = self.pool.get('stock.picking').search(cr, uid,[('name','=',delivery_challan_no),],context=context)\n# if stock_picking_ids:\n# pi_num_list = self.pool.get('stock.picking').read(cr, uid,stock_picking_ids,['origin'], context=context)\n\n# if not pi_num_list:\n# pi_no = None\n# 
vals['pi_no'] = pi_no\n# else : \n# pi_no = self.split_from_list(pi_num_list)\n# vals['pi_no'] = pi_no \n\n# new_id = super(CustomerInvoiceModel, self).create(cr, uid, vals, context=context) \n# return new_id\n\n# def split_from_list(self,list_name,data_field):\n# save = []\n# for r in list_name:\n# save.append(r['origin'])\n# combine = '\\n'.join([str(i) for i in save])\n# return combine"
},
{
"alpha_fraction": 0.5346715450286865,
"alphanum_fraction": 0.5346715450286865,
"avg_line_length": 29.5,
"blob_id": "9ee25307656830fbddda70ce8e186ccbacf201d9",
"content_id": "7341def9030cabebb6bd6d9d17241ece21d68196",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 548,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 18,
"path": "/static/src/js/myscript.js",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "// odoo.define('LC_report_generator.myscript', function(require) {\n// 'use strict';\n // alert('hello i am working');\n\n // $(\".oe_list_record_selector input[type=checkbox]\").change(function () {\n // alert('hello i am working');\n // }); \n // th.oe_list_record_selector input:checkbox[name=\"radiogroup\"]\n \n // $(document).ready(function() {\n\n // $('th.oe_list_record_selector input:checkbox[name=\"radiogroup\"]').change(function() {\n // alert('type A');\n // }); \n\n\n // });\n// });"
},
{
"alpha_fraction": 0.6466951966285706,
"alphanum_fraction": 0.6474084854125977,
"avg_line_length": 47.3563232421875,
"blob_id": "e607bbe445fcc61a997314a32d39d8db33d8c56f",
"content_id": "525a8a23d0861b040ba082468bbaa920cc87f43d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4206,
"license_type": "no_license",
"max_line_length": 148,
"num_lines": 87,
"path": "/models/certificate_of_origin.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\nimport datetime\n\n# packing list Model start\n\nclass BeneficiaryCertificateModel(models.Model):\n _name = 'certificate_of_origin.model'\n\n commercial_invoice_id = fields.Many2one('commercial_invoice.model',string='Commercial Invoice No.', required=True)\n name = fields.Char(string='Ref.No', required=True)\n date = fields.Date(string='Created Date', required=True,default=fields.Date.today())\n\n supplied_product = fields.Char(string='Supplied Product', required=True, default='\"Yarn\"')\n lc_num = fields.Char(string='L/C No.', required=True)\n lc_date = fields.Date(string='lc_date', required=True)\n contact_no = fields.Char(string='contact_no', required=True)\n commercial_invoice_no = fields.Char(string='Commercial Invoice no', required=True)\n commercial_invoice_created_date = fields.Date(string='commercial_invoice_created_date', required=True)\n delivery_challan_no = fields.Char(string='Delivery Challan No.', required=True)\n delivery_challan_created_date = fields.Date(string='delivery_challan_created_date', required=True)\n country_of_origin = fields.Char(string='Country Of Origin', required=True)\n dealer_factory_name = fields.Char(string='Delivery From', required=True)\n\n\n\n # This function is for load data automatically in the existing field from another table\n def onchange_commercial_invoice_id(self, cr, uid, ids, name=False, context=None):\n res= {}\n if name:\n all_data_of_commercial_invoice = self.pool.get('commercial_invoice.model').browse(cr, uid, name,context=context)\n\n # cus_invoice_id = all_data_of_commercial_invoice.customer_invoice_id\n contact_no = all_data_of_commercial_invoice.contact_no\n commercial_invoice_no = all_data_of_commercial_invoice.name\n commercial_invoice_created_date = all_data_of_commercial_invoice.commercial_invoice_created_date\n delivery_challan_no = all_data_of_commercial_invoice.only_seq_num\n country_of_origin = all_data_of_commercial_invoice.country_of_origin2\n 
supplier_factory_address= all_data_of_commercial_invoice.supplier_factory_address\n proforma_invoice_id = all_data_of_commercial_invoice.pi_id\n proforma_invoice_uniq_id = all_data_of_commercial_invoice.proforma_invoice_id\n\n service_obj= self.pool.get('sale.order').browse(cr, uid,proforma_invoice_id.id,context=context)\n lc_id = service_obj.lc_num_id\n lc_info_pool_ids = self.pool.get('lc_informations.model').browse(cr, uid,lc_id.id,context=context)\n lc_num = lc_info_pool_ids.name\n lc_date = lc_info_pool_ids.created_date\n\n\n delivery_challan_datas = self.pool.get('delivery_challan.model').search(cr, uid,[('commercial_invoice_id','=',name),],context=context)\n\n dates = self.pool.get('delivery_challan.model').read(cr, uid,delivery_challan_datas, ['delivery_challan_created_date'], context=context)\n\n if dates: \n delivery_challan_created_date = self.split_delivery_challan_created_date(dates)\n else:\n delivery_challan_created_date = '' \n\n\n now = datetime.datetime.now()\n uniq_num = 'AAYML-CO/'+str(now.year)\n\n res = {'value':{\n 'name': uniq_num,\n 'lc_num':lc_num,\n 'lc_date':lc_date,\n 'contact_no':contact_no, \n 'commercial_invoice_no':commercial_invoice_no, \n 'commercial_invoice_created_date':commercial_invoice_created_date,\n 'delivery_challan_no':delivery_challan_no, \n 'delivery_challan_created_date':delivery_challan_created_date,\n 'country_of_origin':country_of_origin,\n 'dealer_factory_name':supplier_factory_address,\n }}\n\n\n else:\n res={} \n return res \n\n\n def split_delivery_challan_created_date(self,dates):\n names= []\n idx = 0\n for r in dates:\n names.append(r['delivery_challan_created_date'])\n combine = '\\n \\n'.join([str(i) for i in names]) \n return combine"
},
{
"alpha_fraction": 0.7386666536331177,
"alphanum_fraction": 0.746666669845581,
"avg_line_length": 40.66666793823242,
"blob_id": "66907d392da42b8a30d73972cd9ae2705fbe5122",
"content_id": "bfc1afdb5b4b12140cf5e4f7a0c125cb3157bf25",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 375,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 9,
"path": "/models/customer_factory_name_addr.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields\n\nclass CustomerFactoryNameAddress(models.Model):\n _name = 'customer_factory_name_address.model' \n\n\n name = fields.Char(required=True, string='Customer Factory Name',size=250)\n address = fields.Text(required=True, string='Customer Factory Address')\n date = fields.Date('Created date', required=True, default=fields.Date.today())\n"
},
{
"alpha_fraction": 0.5417396426200867,
"alphanum_fraction": 0.5423234105110168,
"avg_line_length": 37.0444450378418,
"blob_id": "663fc456e2aea1b9da3cb9e0d3eec56ad8441b17",
"content_id": "399e89190fe637a911dec18162450e001721cd81",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1713,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 45,
"path": "/models/supplier_factory_name_addr.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api\n\nclass SupplierFactoryNameAddress(models.Model):\n _name = 'supplier_factory_name_address.model' \n\n name = fields.Char(compute='concatenate_custom_fields',store=True,string='name')\n factory_name = fields.Char(required=True, string='Supplier Factory Name')\n address = fields.Text(required=True, string='Supplier Factory Address')\n company_id = fields.Many2one('res.company', string='Company',required=True)\n company_name= fields.Char(string='company_name')\n date = fields.Date('Created date', required=True, default=fields.Date.today())\n\n \n @api.depends('company_name','factory_name')\n def concatenate_custom_fields(self):\n self.name = str(self.company_name) + ', ' + str(self.factory_name) + ' '\n\n def company_id_onchange(self, cr, uid, ids, company_id=False, context=None): \n res = {}\n if company_id: \n \n service_obj= self.pool.get('res.company')\n rec = service_obj.browse(cr, uid, company_id)\n \n res= {\n 'value':\n { \n 'company_name':rec.name,\n }\n } \n else:\n res={\n 'value':\n {\n 'company_name':' ',\n }\n } \n\n return res\n\n # def _get_default_company(self, cr, uid, context=None):\n # company_id = self.pool.get('res.users')._get_company(cr, uid, context=context)\n # if not company_id:\n # raise osv.except_osv(_('Error!'), _('There is no default company for the current user!'))\n # return company_id\n\n"
},
{
"alpha_fraction": 0.6015703678131104,
"alphanum_fraction": 0.6057982444763184,
"avg_line_length": 48.92462158203125,
"blob_id": "add8dda9740c7df745eef7b7ef1304dcca3de499",
"content_id": "b952667f1cd05f56f0c8013980b15aecdffb0ed5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9934,
"license_type": "no_license",
"max_line_length": 216,
"num_lines": 199,
"path": "/models/bill_of_exchange.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\nimport datetime\nfrom openerp.exceptions import except_orm, Warning, RedirectWarning\n\n# packing list Model start\n\nclass BillOfExchangeModel(models.Model):\n _name = 'bill_of_exchange.model'\n\n name = fields.Char(string='name')\n bill_of_exchange_created_date = fields.Date(string='Created Date', default=fields.Date.today(), required=True)\n\n\n commercial_invoice_id = fields.Many2one('commercial_invoice.model',string='Commercial Invoice No.', required=True)\n\n ordered_products_total_amount = fields.Char(string='ordered_products_total_amount', required=True)\n ordered_products_total_amount_in_word = fields.Char(string='ordered_products_total_amount_in_word', required=True)\n currency_symbol_name = fields.Char(string='currency_symbol_name', required=True)\n currency_symbol_name2 = fields.Char(string='currency_symbol_name', required=True)\n currency_symbol = fields.Char(string='currency_symbol', required=True)\n currency_symbol2 = fields.Char(string='currency_symbol', required=True)\n days = fields.Char(string='days', required=True, default='90')\n\n bank_name = fields.Char(string='Bank Name' , required=True)\n bank_brunch = fields.Char(string='Bank Brunch' , required=True)\n bank_address = fields.Char(string='Bank Address' , required=True)\n swift_code = fields.Char(string='Swift Code' , required=True)\n\n customer_name = fields.Char(string='Customer Name', required=True)\n customer_full_address = fields.Text(string='Customer Address' , required=True)\n\n lc_num = fields.Char(string='L/C No' , required=True)\n lc_date = fields.Date(string='L/C Date', required=True)\n lc_bank_name = fields.Char(string='lc_bank_name' , required=True)\n lc_bank_name2 = fields.Char(string='lc_bank_name' , required=True)\n lc_bank_brunch = fields.Char(string='lc_bank_branch' , required=True)\n lc_bank_address = fields.Char(string='lc_bank_address' , required=True)\n\n contact_no = fields.Char(string='contact_no', required=True)\n\n 
company_name = fields.Char(string='Company name', required=True)\n\n pi_no = fields.Char(string='pi_no')\n\n commercial_invoice_name = fields.Char(string='pi_no')\n\n document_status = fields.Char(string='Document Status', default='set_for_LC')\n\n # This function is for load data automatically in the existing field from another table\n def onchange_commercial_invoice_id(self, cr, uid, ids, name=False, context=None):\n res= {}\n if name:\n\n all_data_of_commercial_invoice = self.pool.get('commercial_invoice.model').browse(cr, uid, name,context=context)\n commercial_invoice_name = all_data_of_commercial_invoice.name\n proforma_invoice_id = all_data_of_commercial_invoice.pi_id\n proforma_invoice_uniq_id = all_data_of_commercial_invoice.proforma_invoice_id\n contact_no = all_data_of_commercial_invoice.contact_no\n\n service_obj= self.pool.get('sale.order').browse(cr, uid,proforma_invoice_id.id,context=context)\n\n beneficiary_bank_name = service_obj.beneficiary_bank_name2 \n beneficiary_bank_branch = service_obj.beneficiary_bank_branch\n beneficiary_bank_address = service_obj.beneficiary_bank_address\n swift_code = service_obj.swift_code\n company_name = service_obj.benificiary_name\n service_obj2= self.pool.get('res.partner').browse(cr, uid,service_obj.partner_id.id,context=context)\n service_obj3= self.pool.get('res.country').browse(cr, uid,service_obj2.country_id.id,context=context)\n currency_symbol= self.pool.get('res.currency').browse(cr, uid,service_obj.currency_id.id,context=context)\n\n cus_name = service_obj2.name\n cus_full_address = str(service_obj2.street) + \" , \" + str(service_obj2.street2) + \" , \" + str(service_obj2.city)+ \" - \" + str(service_obj2.zip) + \" , \" + str(service_obj3.name)\n\n lc_id = service_obj.lc_num_id\n lc_info_pool_ids = self.pool.get('lc_informations.model').browse(cr, uid,lc_id.id,context=context)\n lc_num = lc_info_pool_ids.name\n lc_date = lc_info_pool_ids.created_date\n lc_bank_name = lc_info_pool_ids.bank_name2\n 
lc_bank_branch = lc_info_pool_ids.bank_branch\n lc_bank_address = lc_info_pool_ids.bank_address\n\n account_invoice_ids = self.pool.get('account.invoice').search(cr, uid,[('pi_no','=',service_obj.name),('process','=','set_for_LC')],context=context)\n if not account_invoice_ids:\n # print('Account invoice list is empty.')\n raise Warning(_('Account invoice list is empty.'))\n else:\n invoice_line_pool_ids = self.pool.get('account.invoice.line').search(cr, uid,[('invoice_id','=',account_invoice_ids),],context=context)\n\n invoice_lines_product_amount = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['price_subtotal','name'], context=context)\n\n ordered_products_total_amount = self.products_total_amount(invoice_lines_product_amount)\n ordered_products_total_amount_in_word = self.numToWords(ordered_products_total_amount)\n\n res = {'value':{\n 'ordered_products_total_amount': \"{:,}\".format( ordered_products_total_amount) ,\n 'ordered_products_total_amount_in_word':ordered_products_total_amount_in_word,\n 'bank_name':beneficiary_bank_name,\n 'bank_brunch':beneficiary_bank_branch,\n 'bank_address':beneficiary_bank_address,\n 'swift_code':swift_code,\n 'currency_symbol_name':currency_symbol.name,\n 'currency_symbol':currency_symbol.symbol,\n 'currency_symbol_name2':currency_symbol.name,\n 'currency_symbol2':currency_symbol.symbol,\n 'customer_name':cus_name,\n 'customer_full_address':cus_full_address,\n 'lc_num':lc_num,\n 'lc_date':lc_date,\n 'lc_bank_name':lc_bank_name,\n 'lc_bank_name2':lc_bank_name,\n 'lc_bank_brunch':lc_bank_branch,\n 'lc_bank_address':lc_bank_address,\n 'contact_no':contact_no, \n 'company_name':company_name,\n 'pi_no':proforma_invoice_uniq_id,\n 'commercial_invoice_name':commercial_invoice_name\n }}\n\n else:\n res={} \n return res \n\n @api.multi\n def confirm_lc(self):\n pi_no = self.pi_no\n commercial_invoice_name = self.commercial_invoice_name\n process_status_done = 'Done'\n process_status_set_for_LC = 
'set_for_LC'\n self.write({})\n\n self._cr.execute(\"SELECT id FROM bill_of_exchange_model WHERE commercial_invoice_name = %s AND document_status = %s\",(commercial_invoice_name,process_status_set_for_LC))\n lines = self.env['bill_of_exchange.model'].browse([r[0] for r in self._cr.fetchall()])\n\n if lines:\n for inv in self:\n self._cr.execute(\"UPDATE bill_of_exchange_model SET document_status=%s WHERE commercial_invoice_name=%s AND document_status=%s\",(process_status_done,commercial_invoice_name,process_status_set_for_LC))\n self._cr.execute(\"UPDATE account_invoice SET process=%s,process_status=%s WHERE pi_no=%s AND process=%s\",(process_status_done,process_status_done,pi_no,process_status_set_for_LC))\n self.invalidate_cache()\n \n else:\n raise except_orm(_('else')) \n \n return True \n\n def split_from_list(self,list_name,data_field):\n save = []\n for r in list_name:\n save.append(r[data_field])\n combine = '\\n'.join([str(i) for i in save])\n return combine\n\n\n\n def products_total_amount(self,invoice_lines_product_amount):\n total_amount= []\n idx = 0\n for r in invoice_lines_product_amount:\n total_amount.append(r['price_subtotal'])\n combine = sum(total_amount)\n return combine\n\n\n def numToWords(self,num,join=True):\n '''words = {} convert an integer number into words'''\n units = ['','one','two','three','four','five','six','seven','eight','nine']\n teens = ['','eleven','twelve','thirteen','fourteen','fifteen','sixteen', \\\n 'seventeen','eighteen','nineteen']\n tens = ['','ten','twenty','thirty','forty','fifty','sixty','seventy', \\\n 'eighty','ninety']\n thousands = ['','thousand','million','billion','trillion','quadrillion', \\\n 'quintillion','sextillion','septillion','octillion', \\\n 'nonillion','decillion','undecillion','duodecillion', \\\n 'tredecillion','quattuordecillion','sexdecillion', \\\n 'septendecillion','octodecillion','novemdecillion', \\\n 'vigintillion']\n words = []\n if num==0: words.append('zero')\n else:\n numStr = 
'%d'%num\n numStrLen = len(numStr)\n groups = (numStrLen+2)/3\n numStr = numStr.zfill(groups*3)\n for i in range(0,groups*3,3):\n h,t,u = int(numStr[i]),int(numStr[i+1]),int(numStr[i+2])\n g = groups-(i/3+1)\n if h>=1:\n words.append(units[h])\n words.append('hundred')\n if t>1:\n words.append(tens[t])\n if u>=1: words.append(units[u])\n elif t==1:\n if u>=1: words.append(teens[u])\n else: words.append(tens[t])\n else:\n if u>=1: words.append(units[u])\n if (g>=1) and ((h+t+u)>0): words.append(thousands[g]+',')\n if join: return ' '.join(words)\n return words"
},
{
"alpha_fraction": 0.7376543283462524,
"alphanum_fraction": 0.7376543283462524,
"avg_line_length": 34.88888931274414,
"blob_id": "84a22a0bfb1e8648e6b1d9db030b12f84b69e6aa",
"content_id": "a442b596dda4258daedfc5568d6469664eb8916c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 324,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 9,
"path": "/models/terms_of_delivery.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass Product_Type(models.Model):\n _name = 'terms_of_delivery.model'\n\n\n name = fields.Text(required=True, string='Terms Of Delivery')\n created_date = fields.Date('Created Dated', required=True, default=fields.Date.today())\n\n"
},
{
"alpha_fraction": 0.7363343834877014,
"alphanum_fraction": 0.7363343834877014,
"avg_line_length": 33.44444274902344,
"blob_id": "81dd1d19c25b3913133791e6959057b7998863d9",
"content_id": "1ba44a9b2bf4cb9a42b344daaf659723d6c8178c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 311,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 9,
"path": "/models/bank_names.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass LCinformations(models.Model):\n _name = 'bank_names.model'\n\n\n name = fields.Char(required=True, string='Bank Name')\n created_date = fields.Date('Created Dated', required=True, default=fields.Date.today())\n\n"
},
{
"alpha_fraction": 0.6068304777145386,
"alphanum_fraction": 0.6084964871406555,
"avg_line_length": 47.26633071899414,
"blob_id": "fe59beb0f17df6a7d4cbf0f0908275e846f180e2",
"content_id": "f5f34783a180cced37a90ec1a4b5055f3aefa7c2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9604,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 199,
"path": "/models/delivery_challan.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\n\n# packing list Model start\n\nclass PackingListModel(models.Model):\n _name = 'delivery_challan.model'\n\n name = fields.Char(string='Delivery Challan No.', required=True)\n delivery_challan_created_date = fields.Date(string='Created Date', required=True,default=fields.Date.today())\n\n commercial_invoice_id = fields.Many2one('commercial_invoice.model',string='Commercial Invoice No.', required=True)\n\n customer_name = fields.Char(string='Buyer',required=True)\n customer_full_address = fields.Text(string='Buyer Address',required=True)\n\n ordered_products_name = fields.Text(string='ordered_products_name') \n ordered_products_number_of_bags = fields.Text(string='ordered_products_number_of_bags') \n ordered_products_quantity = fields.Text(string='ordered_products_quantity')\n total_bags = fields.Char(string='Total bags')\n ordered_products_total_quantity = fields.Char(string='Products total quantity') \n delivery_order_no = fields.Char(string='Delivery Order No', required=True)\n do_date = fields.Date(string='D/O date', required=True)\n\n gross_weight = fields.Char(string='Gross weight',required=True)\n total_bags2 = fields.Char(string='total_bags', required=True)\n proforma_invoice_uniq_id = fields.Char(string='proforma_invoice_uniq_id', required=True)\n proforma_invoice_created_date = fields.Char(string='proforma_invoice_created_date', required=True)\n lc_num = fields.Char(string='L/C No.', required=True)\n lc_date = fields.Date(string='L/C Dated', required=True)\n contact_no = fields.Char(string='contact no', required=True)\n\n\n # This function is for load data automatically in the existing field from another table\n def onchange_commercial_invoice_id(self, cr, uid, ids, name=False, context=None):\n res= {}\n if name:\n\n all_data_of_commercial_invoice = self.pool.get('commercial_invoice.model').browse(cr, uid, name,context=context)\n cus_invoice_id = all_data_of_commercial_invoice.customer_invoice_id\n 
seq_num = all_data_of_commercial_invoice.only_seq_num\n proforma_invoice_id = all_data_of_commercial_invoice.pi_id\n proforma_invoice_uniq_id = all_data_of_commercial_invoice.proforma_invoice_id\n proforma_invoice_created_date= all_data_of_commercial_invoice.proforma_invoice_created_date\n contact_no= all_data_of_commercial_invoice.contact_no\n delivery_order_num= all_data_of_commercial_invoice.delivery_order_num\n delivery_challan_num= all_data_of_commercial_invoice.delivery_challan_num\n delivery_order_created_date= all_data_of_commercial_invoice.delivery_order_created_date\n num_of_bags = all_data_of_commercial_invoice.num_of_bags \n\n service_obj= self.pool.get('sale.order').browse(cr, uid,proforma_invoice_id.id,context=context)\n lc_id = service_obj.lc_num_id\n lc_info_pool_ids = self.pool.get('lc_informations.model').browse(cr, uid,lc_id.id,context=context)\n lc_num = lc_info_pool_ids.name\n lc_date = lc_info_pool_ids.created_date\n\n service_obj2= self.pool.get('res.partner').browse(cr, uid,service_obj.partner_id.id,context=context)\n service_obj3= self.pool.get('res.country').browse(cr, uid,service_obj2.country_id.id,context=context)\n currency_symbol= self.pool.get('res.currency').browse(cr, uid,service_obj.currency_id.id,context=context)\n cus_name = service_obj2.name\n cus_full_address = str(service_obj2.street) + \" , \" + str(service_obj2.street2) + \" , \" + str(service_obj2.city)+ \" - \" + str(service_obj2.zip) + \" , \" + str(service_obj3.name)\n\n account_invoice_ids = self.pool.get('account.invoice').search(cr, uid,[('pi_no','=',service_obj.name),('process','=','set_for_LC')],context=context)\n if not account_invoice_ids:\n # print('Account invoice list is empty.')\n raise Warning(_('Account invoice list is empty.'))\n else:\n invoice_line_pool_ids = self.pool.get('account.invoice.line').search(cr, uid,[('invoice_id','=',account_invoice_ids),],context=context)\n invoice_lines_product_name = self.pool.get('account.invoice.line').read(cr, 
uid,invoice_line_pool_ids,['name'], context=context)\n invoice_lines_product_quantity = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['quantity','name'], context=context)\n \n ordered_products_names = self.split_products_names(invoice_lines_product_name) \n\n ordered_products_number_of_bags = self.split_products_number_of_bags(invoice_lines_product_quantity,num_of_bags)\n\n ordered_products_quantity = self.split_products_quantity(invoice_lines_product_quantity)\n\n total_bags = self.total_bags_in_quantity(invoice_lines_product_quantity,num_of_bags)\n\n ordered_products_total_quantity = self.products_total_quantity(invoice_lines_product_quantity)\n\n total_gross_weight = self.calculation_of_total_gross_weight(invoice_lines_product_quantity)\n\n\n\n res = {'value':{\n 'name': seq_num,\n # 'name': delivery_challan_num,\n 'customer_name':cus_name, \n 'customer_full_address':cus_full_address,\n 'ordered_products_name':ordered_products_names,\n 'ordered_products_number_of_bags':ordered_products_number_of_bags, \n 'ordered_products_quantity':ordered_products_quantity, \n 'total_bags':\"{:,}\".format( total_bags ),\n 'ordered_products_total_quantity':\"{:,}\".format( ordered_products_total_quantity ),\n 'gross_weight':total_gross_weight,\n 'total_bags2':\"{:,}\".format( total_bags ),\n 'proforma_invoice_uniq_id':proforma_invoice_uniq_id,\n 'proforma_invoice_created_date':proforma_invoice_created_date,\n 'lc_num':lc_num,\n 'lc_date':lc_date,\n 'contact_no':contact_no,\n 'delivery_order_no':delivery_order_num,\n 'do_date':delivery_order_created_date,\n }}\n else:\n res={} \n return res \n\n\n\n\n def split_products_names(self,invoice_lines_product_name):\n seen = set()\n answer = []\n names= []\n for r in invoice_lines_product_name:\n names.append(r['name'])\n combine_names = '\\n'.join([str(i) for i in names])\n for line in combine_names.splitlines():\n if line not in seen:\n seen.add(line)\n answer.append(line)\n combine = '\\n'.join(answer)\n 
return combine\n\n def split_products_number_of_bags(self,invoice_lines_product_quantity,num_of_bags):\n number_of_bags= []\n bags = int(num_of_bags)\n testListDict = {}\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity'] / bags)\n except:\n d=item['name']\n testListDict[d] = int(item['quantity'] / bags)\n \n for the_key, the_value in testListDict.iteritems():\n number_of_bags.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in number_of_bags])\n return combine\n\n def total_bags_in_quantity(self,invoice_lines_product_quantity,num_of_bags):\n number_of_bags= []\n testListDict = {}\n bags = int(num_of_bags)\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n for the_key, the_value in testListDict.iteritems():\n number_of_bags.append(int(the_value / bags)) \n total = sum(number_of_bags)\n return total \n\n def split_products_quantity(self,invoice_lines_product_quantity):\n quantity= []\n testListDict = {}\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n\n for the_key, the_value in testListDict.iteritems():\n quantity.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in quantity])\n return combine\n\n def products_total_quantity(self,invoice_lines_product_quantity):\n total_quantity= []\n for r in invoice_lines_product_quantity:\n total_quantity.append(r['quantity'])\n in_com = sum(total_quantity)\n combine = int(in_com)\n return combine \n\n def calculation_of_total_gross_weight(self,invoice_lines_product_quantity):\n squantity = []\n testListDict = {}\n into = 1.04\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = 
int(item['quantity'])\n for the_key, the_value in testListDict.iteritems():\n squantity.append(the_value)\n result = [ x * into for x in squantity]\n gross = int(sum(result)) \n\n return gross"
},
{
"alpha_fraction": 0.7375327944755554,
"alphanum_fraction": 0.7375327944755554,
"avg_line_length": 37,
"blob_id": "508db43819d5a0aa2793cd92b1f925581f38ff68",
"content_id": "fb876375da30424ceffcc38b8b4b1171366244c4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 381,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 10,
"path": "/models/beneficiary_full_name.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass BeneficiaryFullName(models.Model):\n _name = 'beneficiary_full_name.model'\n\n\n name = fields.Char(required=True, string='Beneficiary Full Name')\n erc_no = fields.Char(string='ERC No')\n created_date = fields.Date('Created Dated', required=True, default=fields.Date.today())\n\n"
},
{
"alpha_fraction": 0.6414612531661987,
"alphanum_fraction": 0.6420998573303223,
"avg_line_length": 51.885135650634766,
"blob_id": "8d184dd4f64db0afe31d0ce1719206ebcb6608c1",
"content_id": "a0e6ec15c47ce06a73f625ecf95b7ffca5d1eb92",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7829,
"license_type": "no_license",
"max_line_length": 162,
"num_lines": 148,
"path": "/models/beneficiary_certificate.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\nimport datetime\nimport time\n# packing list Model start\n\nclass BeneficiaryCertificateModel(models.Model):\n _name = 'beneficiary_certificate.model'\n name = fields.Char(string='Ref.No', required=True)\n # name = fields.Char(string='Ref.No', required=True, default=lambda *a: time.strftime('%Y'))\n commercial_invoice_id = fields.Many2one('commercial_invoice.model',string='Commercial Invoice No.', required=True)\n \n date = fields.Date(string=' Created Date',default=fields.Date.today(), required=True)\n\n ordered_products_total_quantity = fields.Char(string='ordered_products_total_quantity', required=True)\n commodity = fields.Char(string='commodity', required=True)\n customer_name = fields.Char(string='Buyer', required=True) \n customer_full_address = fields.Char(string='Buyer Address', required=True) \n commercial_invoice_no = fields.Char(string='Commercial Invoice no', required=True)\n commercial_invoice_created_date = fields.Date(string='commercial_invoice_created_date', required=True)\n proforma_invoice_no = fields.Char(string='proforma_invoice_no', required=True)\n proforma_invoice_created_date = fields.Date(string='proforma_invoice_created_date', required=True)\n\n truck_receipt_no = fields.Char(string='Truck Receipt No', required=True) \n truck_challan_created_date = fields.Date(string='truck_challan_created_date', required=True)\n\n delivery_challan_no = fields.Char(string='Delivery Challan No.', required=True)\n delivery_challan_created_date = fields.Date(string='delivery_challan_created_date', required=True)\n\n lc_num = fields.Char(string='L/C No.', required=True)\n lc_date = fields.Date(string='lc_date', required=True)\n contact_no = fields.Char(string='contact_no', required=True)\n dealer_factory_name = fields.Char(string='Delivery From', required=True)\n\n \n # This function is for load data automatically in the existing field from another table\n def onchange_commercial_invoice_id(self, cr, uid, 
ids, name=False, context=None):\n res= {}\n if name:\n all_data_of_commercial_invoice = self.pool.get('commercial_invoice.model').browse(cr, uid, name,context=context)\n # cus_invoice_id = all_data_of_commercial_invoice.customer_invoice_id\n cus_name = all_data_of_commercial_invoice.customer_name\n customer_full_address = all_data_of_commercial_invoice.customer_full_address\n commercial_invoice_no = all_data_of_commercial_invoice.name\n commercial_invoice_created_date = all_data_of_commercial_invoice.commercial_invoice_created_date\n proforma_invoice_id = all_data_of_commercial_invoice.pi_id\n proforma_invoice_uniq_id = all_data_of_commercial_invoice.proforma_invoice_id\n proforma_invoice_created_date = all_data_of_commercial_invoice.proforma_invoice_created_date\n contact_no = all_data_of_commercial_invoice.contact_no\n only_seq_num = all_data_of_commercial_invoice.only_seq_num\n supplier_factory_address= all_data_of_commercial_invoice.supplier_factory_address\n\n service_obj= self.pool.get('sale.order').browse(cr, uid,proforma_invoice_id.id,context=context)\n lc_id = service_obj.lc_num_id\n lc_info_pool_ids = self.pool.get('lc_informations.model').browse(cr, uid,lc_id.id,context=context)\n lc_num = lc_info_pool_ids.name\n lc_date = lc_info_pool_ids.created_date\n\n account_invoice_ids = self.pool.get('account.invoice').search(cr, uid,[('pi_no','=',service_obj.name),('process','=','set_for_LC')],context=context)\n if not account_invoice_ids:\n # print('Account invoice list is empty.')\n raise Warning(_('Account invoice list is empty.'))\n else:\n invoice_line_pool_ids = self.pool.get('account.invoice.line').search(cr, uid,[('invoice_id','=',account_invoice_ids),],context=context)\n\n # invoice_lines_product_name = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['name'], context=context)\n\n invoice_lines_product_quantity = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['quantity'], context=context)\n\n 
ordered_products_total_quantity = self.products_total_quantity(invoice_lines_product_quantity)\n\n packing_list_pool_ids = self.pool.get('packing_list.model').search(cr, uid,[('commercial_invoice_no','=',commercial_invoice_no),],context=context)\n\n commodity_names = self.pool.get('packing_list.model').read(cr, uid,packing_list_pool_ids, ['commodity'], context=context)\n\n commodity = self.split_commodity(commodity_names)\n\n truck_challan_datas = self.pool.get('truck_challan.model').search(cr, uid,[('commercial_invoice_id','=',name),],context=context)\n\n dates = self.pool.get('truck_challan.model').read(cr, uid,truck_challan_datas, ['truck_challan_created_date'], context=context)\n\n truck_challan_created_date = self.split_truck_challan_created_date(dates)\n\n delivery_challan_datas = self.pool.get('delivery_challan.model').search(cr, uid,[('commercial_invoice_id','=',name),],context=context)\n\n dates = self.pool.get('delivery_challan.model').read(cr, uid,delivery_challan_datas, ['delivery_challan_created_date'], context=context)\n\n delivery_challan_created_date = self.split_delivery_challan_created_date(dates)\n\n now = datetime.datetime.now()\n uniq_num = 'AAYML-CERT/'+str(now.year)\n\n res = {'value':{\n 'name': uniq_num,\n 'ordered_products_total_quantity':ordered_products_total_quantity,\n 'customer_name':cus_name, \n 'customer_full_address':customer_full_address, \n 'commercial_invoice_no':commercial_invoice_no, \n 'commercial_invoice_created_date':commercial_invoice_created_date,\n 'proforma_invoice_no':proforma_invoice_uniq_id,\n 'proforma_invoice_created_date':proforma_invoice_created_date,\n 'lc_num':lc_num,\n 'lc_date':lc_date,\n 'contact_no':contact_no, \n 'commodity':commodity,\n 'truck_receipt_no':only_seq_num,\n 'truck_challan_created_date':truck_challan_created_date,\n 'delivery_challan_no':only_seq_num, \n 'delivery_challan_created_date':delivery_challan_created_date,\n 'dealer_factory_name':supplier_factory_address,\n }}\n\n else:\n res={} \n 
return res \n\n\n def products_total_quantity(self,invoice_lines_product_quantity):\n total_quantity= []\n idx = 0\n for r in invoice_lines_product_quantity:\n total_quantity.append(r['quantity'])\n in_com = sum(total_quantity)\n combine = int(in_com)\n return combine \n\n def split_commodity(self,commodity_names):\n names= []\n idx = 0\n for r in commodity_names:\n names.append(r['commodity'])\n combine = '\\n \\n'.join([str(i) for i in names]) \n return combine\n\n def split_truck_challan_created_date(self,dates):\n names= []\n idx = 0\n for r in dates:\n names.append(r['truck_challan_created_date'])\n combine = '\\n \\n'.join([str(i) for i in names]) \n return combine\n\n def split_delivery_challan_created_date(self,dates):\n names= []\n idx = 0\n for r in dates:\n names.append(r['delivery_challan_created_date'])\n combine = '\\n \\n'.join([str(i) for i in names]) \n return combine\n\n\n"
},
{
"alpha_fraction": 0.6280006170272827,
"alphanum_fraction": 0.6323369741439819,
"avg_line_length": 49.453125,
"blob_id": "bd577461223057c29c7f39cc32dc05a2b5e56efd",
"content_id": "d8f1ca86adeecd0b072ca4b5d1dae1e2b1a02430",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6457,
"license_type": "no_license",
"max_line_length": 203,
"num_lines": 128,
"path": "/models/forwarding_letter.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\nimport datetime\n\n# packing list Model start\n\nclass ForwardingLetterModel(models.Model):\n _name = 'forwarding_letter.model'\n\n commercial_invoice_id = fields.Many2one('commercial_invoice.model',string='Commercial Invoice No.', required=True)\n name = fields.Char(string='Ref.No', required=True)\n date = fields.Date(string='Created Date',default=fields.Date.today(), required=True)\n\n for_whom = fields.Char(string='Forwarding To', required=True)\n bank_name = fields.Char(string='Bank Name', required=True)\n bank_brunch = fields.Char(string='Bank Branch', required=True)\n bank_address = fields.Text(string='Bank Address', required=True)\n swift_code = fields.Char(string='Swift Code', required=True)\n\n\n lc_num = fields.Char(string='L/C No', required=True)\n lc_date = fields.Date(string='L/C Date', required=True)\n lc_num2 = fields.Char(string='lc_num', required=True)\n lc_date2 = fields.Date(string='lc_date', required=True)\n lc_bank_name = fields.Char(string='lc_bank_name', required=True)\n lc_bank_brunch = fields.Char(string='lc_bank_brunch', required=True)\n lc_bank_address = fields.Char(string='lc_bank_address', required=True)\n currency_symbol = fields.Char(string='currency_symbol', required=True)\n ordered_products_total_amount = fields.Char(string='Total', required=True)\n\n transfer_per = fields.Integer(string='Transfer Per', required=True)\n fc_account_no = fields.Char(string='F/C Account No', required=True)\n\n c1 = fields.Char(string='c1', required=True)\n c2 = fields.Char(string='c2', required=True)\n c3 = fields.Char(string='c3', required=True)\n c4 = fields.Char(string='c4', required=True)\n c5 = fields.Char(string='c5', required=True)\n c6 = fields.Char(string='c6', required=True)\n c7 = fields.Char(string='c7', required=True)\n c8 = fields.Char(string='c8', required=True)\n c9 = fields.Char(string='c9', required=True)\n\n\n\n # This function is for load data automatically in the existing field 
from another table\n def onchange_commercial_invoice_id(self, cr, uid, ids, name=False, context=None):\n res= {}\n if name:\n\n all_data_of_commercial_invoice = self.pool.get('commercial_invoice.model').browse(cr, uid, name,context=context)\n # cus_invoice_id = all_data_of_commercial_invoice.customer_invoice_id\n proforma_invoice_id = all_data_of_commercial_invoice.pi_id\n proforma_invoice_uniq_id = all_data_of_commercial_invoice.proforma_invoice_id\n\n service_obj= self.pool.get('sale.order').browse(cr, uid,proforma_invoice_id.id,context=context) \n # sale_order_id = self.pool.get('sale.order').search(cr, uid,[('name','=',proforma_invoice_id),],context=context)\n # sale_order_data_list = self.pool.get('sale.order').read(cr, uid,sale_order_id,['beneficiary_bank_name2', 'beneficiary_bank_branch','beneficiary_bank_address','swift_code'], context=context)\n # beneficiary_bank_name = self.split_from_list(sale_order_data_list,'beneficiary_bank_name2')\n # beneficiary_bank_branch = self.split_from_list(sale_order_data_list,'beneficiary_bank_branch')\n # beneficiary_bank_address = self.split_from_list(sale_order_data_list,'beneficiary_bank_address')\n # swift_code = self.split_from_list(sale_order_data_list,'swift_code')\n\n beneficiary_bank_name = service_obj.beneficiary_bank_name2 \n beneficiary_bank_branch = service_obj.beneficiary_bank_branch\n beneficiary_bank_address = service_obj.beneficiary_bank_address\n swift_code = service_obj.swift_code\n # service_obj= self.pool.get('account.invoice').browse(cr, uid,cus_invoice_id.id,context=context)\n currency_symbol= self.pool.get('res.currency').browse(cr, uid,service_obj.currency_id.id,context=context)\n \n lc_id = service_obj.lc_num_id\n lc_info_pool_ids = self.pool.get('lc_informations.model').browse(cr, uid,lc_id.id,context=context)\n lc_num = lc_info_pool_ids.name\n lc_date = lc_info_pool_ids.created_date\n lc_bank_name = lc_info_pool_ids.bank_name2\n lc_bank_branch = lc_info_pool_ids.bank_branch\n lc_bank_address = 
lc_info_pool_ids.bank_address\n\n account_invoice_ids = self.pool.get('account.invoice').search(cr, uid,[('pi_no','=',service_obj.name),('process','=','set_for_LC')],context=context)\n if not account_invoice_ids:\n # print('Account invoice list is empty.')\n raise Warning(_('Account invoice list is empty.'))\n else:\n invoice_line_pool_ids = self.pool.get('account.invoice.line').search(cr, uid,[('invoice_id','=',account_invoice_ids),],context=context)\n\n invoice_lines_product_amount = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['price_subtotal','name'], context=context)\n\n ordered_products_total_amount = self.products_total_amount(invoice_lines_product_amount)\n\n \n\n now = datetime.datetime.now()\n uniq_num = 'AAYML-CERT/'+str(now.year)\n\n res = {'value':{\n 'name': uniq_num,\n 'lc_num':lc_num,\n 'lc_date':lc_date,\n 'lc_num2':lc_num, \n 'lc_date2':lc_date,\n 'lc_bank_name':lc_bank_name,\n 'lc_bank_brunch':lc_bank_branch,\n 'lc_bank_address':lc_bank_address,\n 'currency_symbol':currency_symbol.symbol,\n 'ordered_products_total_amount':ordered_products_total_amount,\n 'bank_name':beneficiary_bank_name,\n 'bank_brunch':beneficiary_bank_branch,\n 'bank_address':beneficiary_bank_address,\n 'swift_code':swift_code,\n }}\n\n else:\n res={} \n return res \n\n def split_from_list(self,list_name,data_field):\n save = []\n for r in list_name:\n save.append(r[data_field])\n combine = '\\n'.join([str(i) for i in save])\n return combine \n\n def products_total_amount(self,invoice_lines_product_amount):\n total_amount= []\n idx = 0\n for r in invoice_lines_product_amount:\n total_amount.append(r['price_subtotal'])\n combine = sum(total_amount)\n return combine"
},
{
"alpha_fraction": 0.7345309257507324,
"alphanum_fraction": 0.7405189871788025,
"avg_line_length": 40.83333206176758,
"blob_id": "a93e48e4eb50b876e1515e0bbcb6fffdb9203d2d",
"content_id": "a0026e68a129b6512b30e63f867db38ec9674566",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 501,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 12,
"path": "/models/beneficiary_bank_branch.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass BeneficiaryBankBranch(models.Model):\n _name = 'beneficiary_bank_branch.model'\n\n\n name = fields.Char(required=True, string='Beneficiary Bank Name',size=100)\n bank_branch = fields.Text('Beneficiary Bank Brunch')\n bank_address = fields.Text('Beneficiary Bank Address')\n swift_code = fields.Text('Swift Code')\n date = fields.Date('Created Dated', required=True, default=fields.Date.today())"
},
{
"alpha_fraction": 0.7354838848114014,
"alphanum_fraction": 0.7354838848114014,
"avg_line_length": 33.33333206176758,
"blob_id": "e9fe0b60adead3405cfcd88225cd0e25728f455c",
"content_id": "cfccf0d83ead3034dfa57b3e0b5c5275dd2bfccf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 310,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 9,
"path": "/models/bank_branch.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass BankBranch(models.Model):\n _name = 'bank_branch.model'\n\n\n name = fields.Char(required=True, string='Branch Name')\n created_date = fields.Date('Created Dated', required=True, default=fields.Date.today())\n\n"
},
{
"alpha_fraction": 0.5126270651817322,
"alphanum_fraction": 0.5145949721336365,
"avg_line_length": 35.2023811340332,
"blob_id": "002fa76fad8d4a02c03a15b173b6cfc4ddfb55ab",
"content_id": "4e07141b32cc48a577f280e609d409f5e180b791",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3049,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 84,
"path": "/models/lc_informations.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\n# from openerp.exceptions import ValidationError\n\nclass LCinformations(models.Model):\n _name = 'lc_informations.model'\n\n\n name = fields.Char(required=True, string='L/C No.')\n pi_no_id = fields.Many2one('sale.order',string='P/I No')\n pi_no = fields.Char(string='P/I No')\n created_date = fields.Date('L/C Created Dated', required=True, default=fields.Date.today())\n bank_name = fields.Many2one('bank_names_branch_address.model',required=True, string='LC Bank Name')\n bank_name2 = fields.Char(required=True, string='LC Bank Name')\n bank_branch = fields.Char(required=True, string='LC Bank Branch')\n bank_address = fields.Text('Bank Address',required=True,)\n vat_no = fields.Char('VAT No.')\n irc_no = fields.Char('IRC No.')\n bin_no = fields.Char('BIN No.')\n tin_no = fields.Char('TIN No.')\n shipment_last_date = fields.Date('Last Date Of Shipment')\n amend_no = fields.Char('Amend No.')\n amend_date = fields.Date('Amend Date')\n \n\n\n\n # @api.one \n # @api.constrains('created_date')\n # def _check_lc_date(self):\n # if self.created_date > fields.Date.today():\n # raise ValidationError(_(\"L/C Date can't be greater than current date!\"))\n\n\n \n def onchange_bank_name_branch(self, cr, uid, ids, bank_name, context=None):\n bank_name_id = bank_name\n if bank_name_id :\n service_obj = self.pool.get('bank_names_branch_address.model').browse(cr, uid,bank_name_id,context=context)\n lc_bank_name = service_obj.bank_name\n lc_bank_branch = service_obj.bank_branch\n lc_bank_address = service_obj.bank_address\n if lc_bank_branch and lc_bank_address:\n res = {\n 'value': {\n 'bank_name2': lc_bank_name,\n 'bank_branch': lc_bank_branch,\n 'bank_address': lc_bank_address,\n }\n }\n else :\n res = {\n 'value': {\n 'bank_name2': '',\n 'bank_branch': '',\n 'bank_address': ''\n }\n }\n else:\n res = {}\n return res\n\n def onchange_pi_no_id(self, cr, uid, ids, pi_no_id, context=None): \n pi_no_id = pi_no_id\n if pi_no_id:\n 
service_obj = self.pool.get('sale.order').browse(cr, uid,pi_no_id,context=context)\n pi_no = service_obj.name\n if pi_no:\n res = {\n 'value': {\n 'pi_no': pi_no,\n }\n } \n else:\n res = {}\n return res \n\n \n def split_bank_address(self,bank_address_in_list):\n address= []\n idx = 0\n for r in bank_address_in_list:\n address.append(r['address']) \n combine = '\\n \\n \\n'.join([str(i) for i in address])\n return combine\n\n "
},
{
"alpha_fraction": 0.7386666536331177,
"alphanum_fraction": 0.746666669845581,
"avg_line_length": 40.66666793823242,
"blob_id": "ff611af331791d6b7024a875d9c46d1580df8c28",
"content_id": "a3d070438e735dd97d656d90d98f9557a2d1f504",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 375,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 9,
"path": "/models/shipping_factory_name_address.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields\n\nclass ShippingFactoryNameAddress(models.Model):\n _name = 'shipping_factory_name_address.model' \n\n\n name = fields.Char(required=True, string='Shipping Factory Name',size=250)\n address = fields.Text(required=True, string='Shipping Factory Address')\n date = fields.Date('Created date', required=True, default=fields.Date.today())\n"
},
{
"alpha_fraction": 0.5995793342590332,
"alphanum_fraction": 0.6021608114242554,
"avg_line_length": 52.36224365234375,
"blob_id": "110f990c79dad936778c314ba5cbb264187a2948",
"content_id": "b74066e14f4075c5064e723f326e507ff9627307",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10459,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 196,
"path": "/models/packing_list.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\n\nclass PackingListModel(models.Model):\n _name = 'packing_list.model'\n name = fields.Many2one('commercial_invoice.model',string='Commercial Invoice No.', required=True)\n commercial_invoice_no = fields.Char(string='commercial_invoice_no')\n packing_list_created_date = fields.Date(string='Created Date', default=fields.Date.today(), required=True)\n customer_name = fields.Char(string='Buyer', required=True)\n customer_name2 = fields.Char(string='Buyer', required=True)\n customer_full_address = fields.Text(string='Buyer Address', required=True)\n commodity = fields.Char(string='Commodity', required=True, default=\"Export Quality Yarn\")\n delivery_form = fields.Text(string='Delivery From', required=True)\n ordered_products_name = fields.Text(string='ordered_products_name') \n ordered_products_number_of_bags = fields.Text(string='ordered_products_number_of_bags') \n ordered_products_quantity = fields.Text(string='ordered_products_quantity')\n gross_weights = fields.Text(string='gross weights')\n total_gross_weight = fields.Char(string='gross weight')\n total_gross_weight2 = fields.Char(string='gross weight', required=True)\n total_bags = fields.Char(string='Total Bags')\n total_bags2 = fields.Char(string='Total Bags', required=True)\n num_of_bags = fields.Integer(string='Number of bags', required=True) \n proforma_invoice_uniq_id = fields.Char(string='Proforma Invoice No.', required=True)\n proforma_invoice_created_date = fields.Date(string='proforma_invoice_created_date', required=True)\n lc_num = fields.Char(string='L/C No.', required=True)\n lc_num2 = fields.Char(string='L/C No.', required=True)\n lc_date = fields.Date(string='L/C Dated', required=True)\n lc_date2 = fields.Date(string='L/C Dated', required=True)\n contact_no = fields.Char(string='contact no', required=True)\n \n # This function is for load data automatically in the existing field from another table\n def onchange_commercial_invoice_id(self, cr, 
uid, ids, name=False, context=None):\n res= {}\n if name:\n all_data_of_commercial_invoice = self.pool.get('commercial_invoice.model').browse(cr, uid, name,context=context)\n commercial_invoice_no = all_data_of_commercial_invoice.name\n proforma_invoice_id = all_data_of_commercial_invoice.pi_id\n proforma_invoice_uniq_id = all_data_of_commercial_invoice.proforma_invoice_id\n proforma_invoice_created_date= all_data_of_commercial_invoice.proforma_invoice_created_date\n contact_no= all_data_of_commercial_invoice.contact_no\n supplier_factory_address= all_data_of_commercial_invoice.supplier_factory_address\n num_of_bags = all_data_of_commercial_invoice.num_of_bags \n\n service_obj= self.pool.get('sale.order').browse(cr, uid,proforma_invoice_id.id,context=context)\n lc_id = service_obj.lc_num_id\n lc_info_pool_ids = self.pool.get('lc_informations.model').browse(cr, uid,lc_id.id,context=context)\n lc_num = lc_info_pool_ids.name\n lc_date = lc_info_pool_ids.created_date\n service_obj2= self.pool.get('res.partner').browse(cr, uid,service_obj.partner_id.id,context=context)\n service_obj3= self.pool.get('res.country').browse(cr, uid,service_obj2.country_id.id,context=context)\n currency_symbol= self.pool.get('res.currency').browse(cr, uid,service_obj.currency_id.id,context=context)\n cus_name = service_obj2.name\n cus_full_address = str(service_obj2.street) + \" , \" + str(service_obj2.street2) + \" , \" + str(service_obj2.city)+ \" - \" + str(service_obj2.zip) + \" , \" + str(service_obj3.name)\n account_invoice_ids = self.pool.get('account.invoice').search(cr, uid,[('pi_no','=',service_obj.name),('process','=','set_for_LC')],context=context)\n if not account_invoice_ids:\n # print('Account invoice list is empty.')\n raise Warning(_('Account invoice list is empty.'))\n else:\n invoice_line_pool_ids = self.pool.get('account.invoice.line').search(cr, uid,[('invoice_id','=',account_invoice_ids),],context=context)\n invoice_lines_product_name = 
self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['name'], context=context)\n invoice_lines_product_quantity = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['quantity','name'], context=context)\n invoice_lines_product_price_of_unit = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['price_unit','name'], context=context)\n invoice_lines_product_amount = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['price_subtotal','name'], context=context)\n \n ordered_products_names = self.split_products_names(invoice_lines_product_name) \n ordered_products_number_of_bags = self.split_products_number_of_bags(invoice_lines_product_quantity,num_of_bags)\n ordered_products_quantity = self.split_products_quantity(invoice_lines_product_quantity)\n gross_weights = self.calculation_of_gross_weights(invoice_lines_product_quantity)\n total_gross_weight = self.calculation_of_total_gross_weight(invoice_lines_product_quantity)\n total_bags = self.total_bags_in_quantity(invoice_lines_product_quantity,num_of_bags)\n\n res = {'value':{\n 'commercial_invoice_no':commercial_invoice_no, \n 'customer_name' : cus_name,\n 'customer_name2' : cus_name,\n 'customer_full_address' : cus_full_address,\n 'ordered_products_name':ordered_products_names,\n 'ordered_products_number_of_bags':ordered_products_number_of_bags,\n 'ordered_products_quantity':ordered_products_quantity,\n 'gross_weights':gross_weights,\n 'total_gross_weight':\"{:,}\".format( total_gross_weight ),\n 'total_gross_weight2':\"{:,}\".format( total_gross_weight ),\n 'total_bags':\"{:,}\".format( total_bags ),\n 'total_bags2':\"{:,}\".format( total_bags ),\n 'num_of_bags':num_of_bags,\n 'proforma_invoice_uniq_id':proforma_invoice_uniq_id,\n 'proforma_invoice_created_date':proforma_invoice_created_date,\n 'lc_num':lc_num,\n 'lc_num2':lc_num,\n 'lc_date':lc_date, \n 'lc_date2':lc_date,\n 'contact_no':contact_no, \n 
'delivery_form':supplier_factory_address,\n }} \n else:\n res={} \n return res \n\n\n\n def split_products_names(self,invoice_lines_product_name):\n seen = set()\n answer = []\n names= []\n for r in invoice_lines_product_name:\n names.append(r['name'])\n combine_names = '\\n'.join([str(i) for i in names])\n for line in combine_names.splitlines():\n if line not in seen:\n seen.add(line)\n answer.append(line)\n combine = '\\n'.join(answer)\n return combine\n\n def split_products_number_of_bags(self,invoice_lines_product_quantity,num_of_bags):\n number_of_bags= []\n bags = int(num_of_bags)\n testListDict = {}\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity'] / bags)\n except:\n d=item['name']\n testListDict[d] = int(item['quantity'] / bags)\n \n for the_key, the_value in testListDict.iteritems():\n number_of_bags.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in number_of_bags])\n return combine\n\n def total_bags_in_quantity(self,invoice_lines_product_quantity,num_of_bags):\n number_of_bags= []\n testListDict = {}\n bags = int(num_of_bags)\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n for the_key, the_value in testListDict.iteritems():\n number_of_bags.append(int(the_value / bags)) \n total = sum(number_of_bags)\n return total \n\n def split_products_quantity(self,invoice_lines_product_quantity):\n quantity= []\n testListDict = {}\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n\n for the_key, the_value in testListDict.iteritems():\n quantity.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in quantity])\n return combine\n\n def calculation_of_gross_weights(self,invoice_lines_product_quantity):\n gross_weights = 
[]\n testListDict = {}\n into = 1.04\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n for the_key, the_value in testListDict.iteritems():\n gross_weights.append(the_value)\n result = [ \"{:,}\".format( int(x * into) ) for x in gross_weights]\n combine = '\\n \\n'.join([str(i) for i in result]) \n return combine\n\n def calculation_of_total_gross_weight(self,invoice_lines_product_quantity):\n squantity = []\n testListDict = {}\n into = 1.04\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n for the_key, the_value in testListDict.iteritems():\n squantity.append(the_value)\n result = [ x * into for x in squantity]\n gross = int(sum(result)) \n\n return gross\n"
},
{
"alpha_fraction": 0.7476340532302856,
"alphanum_fraction": 0.7476340532302856,
"avg_line_length": 34.11111068725586,
"blob_id": "cfcd0aa343e58e8bf726995ab7de9e46f26eacc2",
"content_id": "7c46cb4ed8178db011d6c254758fd3cd9c1367c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 317,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 9,
"path": "/models/reimbursement.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass Reimbursement(models.Model):\n _name = 'reimbursement.model'\n\n\n name = fields.Text(required=True, string='Reimbursement')\n created_date = fields.Date('Created Dated', required=True, default=fields.Date.today())\n\n"
},
{
"alpha_fraction": 0.7377049326896667,
"alphanum_fraction": 0.7377049326896667,
"avg_line_length": 32.77777862548828,
"blob_id": "c1d3859c60b3ff87c78f6dff8cca96cf974a9b81",
"content_id": "f2473e131d0b328127925938bd1218be31227bc6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 305,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 9,
"path": "/models/commodity.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass Commodity(models.Model):\n _name = 'commodity.model'\n\n\n name = fields.Text(required=True, string='Commodity')\n created_date = fields.Date('Created Dated', required=True, default=fields.Date.today())\n\n"
},
{
"alpha_fraction": 0.5676470398902893,
"alphanum_fraction": 0.5694117546081543,
"avg_line_length": 29.85454559326172,
"blob_id": "6e3a03e24650862a88b66183474c6291ee0c9a16",
"content_id": "6f5e2861e91818c8e25d063bca230c868d418fa1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1700,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 55,
"path": "/__openerp__.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n{\n 'name' : 'LC Report Generator',\n\n 'summary': \"LC report management\",\n\n 'description': 'Simplly creat your LC report',\n\n 'author': \"Metaporphosis.com.bd\",\n 'website': \"http://www.metamorphosis.com.bd/\",\n\n 'version': '0.1',\n\n 'depends': [\n 'base',\n 'account',\n ],\n\n 'data': [ \n 'views/customer_invoices_records.xml',\n 'views/commercial_invoices.xml',\n 'views/lc_informations.xml',\n 'views/invoice_name_sequence.xml',\n 'views/country_origin.xml', \n 'views/delivery_transport.xml',\n # 'views/delivery_address.xml', \n # 'views/shipping_factory_name_address.xml', \n # 'views/proforma_invoice.xml', \n 'views/packing_list.xml', \n 'views/truck_challan.xml', \n 'views/delivery_challan.xml', \n 'views/beneficiary_certificate.xml', \n 'views/certificate_of_origin.xml', \n 'views/forwarding_letter.xml', \n 'views/bill_of_exchange.xml', \n 'views/terms_conditions.xml', \n 'views/supplier_factory_name_addr.xml', \n # 'views/customer_factory_name_addr.xml', \n # 'views/bank_names.xml',\n # 'views/bank_branch.xml', \n 'views/bank_names_branch_address.xml', \n # 'views/beneficiary_full_name.xml', \n 'views/reimbursement.xml', \n 'views/method_of_payment.xml', \n 'views/product_type.xml', \n 'views/terms_of_delivery.xml',\n # 'views/commodity.xml',\n # 'views/beneficiary_bank_branch.xml',\n 'views/summery_reports/proforma_invoice_status.xml',\n 'views/signature_upload.xml',\n \n ],\n 'auto_install':False,\n 'installable': True,\n} "
},
{
"alpha_fraction": 0.7259259223937988,
"alphanum_fraction": 0.7333333492279053,
"avg_line_length": 32.75,
"blob_id": "64e5725b59aaf9675e88188ac70c408338a0e853",
"content_id": "e5ef4f197a910aa146d1317304955d9b711f3bd1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 270,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 8,
"path": "/models/delivery_transport.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields\n\nclass DeliveryTransport(models.Model):\n _name = 'delivery_transport.model'\n\n\n name = fields.Char(required=True, string='Transport name',size=64)\n date = fields.Date('Created date', required=True, default=fields.Date.today())\n"
},
{
"alpha_fraction": 0.755836546421051,
"alphanum_fraction": 0.755836546421051,
"avg_line_length": 32.129032135009766,
"blob_id": "ed1aacb121e55e07099869426b38adbc91980891",
"content_id": "8fb0a561ed3b7730f15a684390f058137feea1e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1028,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 31,
"path": "/models/__init__.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from . import customer_invoices\nfrom . import commercial_invoices \nfrom . import lc_informations \nfrom . import country_origin \nfrom . import delivery_transport \n# from . import delivery_address \n# from . import shipping_factory_name_address \n# from . import proforma_invoice \nfrom . import packing_list \nfrom . import truck_challan \nfrom . import delivery_challan \nfrom . import beneficiary_certificate \nfrom . import certificate_of_origin \nfrom . import forwarding_letter \nfrom . import bill_of_exchange \nfrom . import terms_conditions \nfrom . import supplier_factory_name_addr \n# from . import customer_factory_name_addr \n# from . import bank_names\n# from . import bank_branch \nfrom . import bank_names_branch_address \n# from . import beneficiary_full_name \nfrom . import reimbursement \nfrom . import method_of_payment \nfrom . import product_type \nfrom . import terms_of_delivery\n# from . import commodity\n# from . import beneficiary_bank_branch\n\nfrom . import summery_reports\nfrom . import signature_upload\n\n"
},
{
"alpha_fraction": 0.7749999761581421,
"alphanum_fraction": 0.7749999761581421,
"avg_line_length": 37,
"blob_id": "9c8b4867fcf856b788f6128fbf87d9808b356c28",
"content_id": "d29c972847684f5e8088fafd8051e93c9a64d3cf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 40,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 1,
"path": "/models/summery_reports/__init__.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from . import proforma_invoice_status\n\n\n"
},
{
"alpha_fraction": 0.5787970423698425,
"alphanum_fraction": 0.5827314853668213,
"avg_line_length": 49.1135139465332,
"blob_id": "27b2639cdf9d98eb23c137a3ce507c6ae4c058eb",
"content_id": "a927585a43752d7e0a0addca8075ac82d4adc93b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 18554,
"license_type": "no_license",
"max_line_length": 185,
"num_lines": 370,
"path": "/models/commercial_invoices.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api,_\nfrom openerp.exceptions import except_orm, Warning, RedirectWarning\n\nclass CommercialInvoiceModel(models.Model):\n _name = 'commercial_invoice.model'\n _rec_name = \"name\"\n name = fields.Char(string='Commercial Invoice Number',readonly=True)\n commercial_invoice_created_date = fields.Date(string='Created Date',default=fields.Date.today())\n customer_invoice_id = fields.Many2one('account.invoice',string='Customer Invoice No.')\n pi_id = fields.Many2one('sale.order',string='Proforma Invoice No.', required=True)\n customer_name = fields.Char(string='Customer Name')\n customer_name2 = fields.Char(string='Customer Name')\n customer_full_address = fields.Text(string='Customer Address')\n proforma_invoice_id = fields.Char(string='Proforma Invoice No.')\n proforma_invoice_created_date = fields.Date(string='Proforma Invoice Date')\n transport = fields.Many2one('delivery_transport.model',string='Means of Transport', required=True)\n supplier_factory_name = fields.Char(string='Delivery From Factory Name')\n supplier_factory_address = fields.Text(string='Delivery From Factory Address')\n beneficiary_vat_no = fields.Char(string='Beneficiary VAT No:', required=True)\n erc_no = fields.Char(string='ERC No')\n country_of_origin = fields.Char(string='Country Of Origin')\n country_of_origin2 = fields.Char(string='Country Of Origin')\n destination_address = fields.Text(string='Destination')\n client_shipping_factory_address = fields.Text(string='Factory Address') \n lc_id = fields.Char('L/C id')\n lc_num = fields.Char('L/C No.')\n lc_num2 = fields.Char(string='L/C No.')\n lc_date = fields.Date(string='L/C Dated')\n lc_date2 = fields.Date(string='L/C Dated')\n issuing_bank = fields.Text(string='Issuing Bank')\n vat_code = fields.Char(string='VAT No.' )\n irc_num = fields.Char(string='IRC No.' )\n bin_num = fields.Char(string='BIN No.' ) \n tin_num = fields.Char(string='TIN No.' 
)\n amend_no = fields.Char(string='Amend No' )\n amend_date = fields.Date(string='Amend Date' )\n ordered_products_name = fields.Text(string='ordered_products_name') \n ordered_products_number_of_bags = fields.Text(string='ordered_products_number_of_bags') \n ordered_products_quantity = fields.Text(string='ordered_products_quantity') \n ordered_products_price_of_unit = fields.Text(string='ordered_products_price_of_unit')\n ordered_products_amount = fields.Text(string='ordered_products_amount')\n ordered_products_total_quantity = fields.Char(string='ordered_products_total_quantity')\n ordered_products_total_amount = fields.Char(string='Total')\n ordered_products_total_amount_in_word = fields.Char(string='ordered_products_total_amount_in_word')\n currency_symbol_name = fields.Char(string='currency_symbol_name')\n currency_symbol_name1 = fields.Char(string='currency_symbol_name')\n currency_symbol_name2 = fields.Char(string='currency_symbol_name')\n currency_symbol = fields.Char(string='currency_symbol')\n currency_symbol1 = fields.Char(string='currency_symbol')\n currency_symbol2 = fields.Char(string='currency_symbol')\n contact_no = fields.Text(string='contact no',default='Export Sales Contract No. 
MCFN-MK010-018 dated 07-APR-18')\n only_seq_num = fields.Char(string='only_seq_num', size=255)\n num_of_bags = fields.Char(string='num_of_bags', size=255)\n delivery_order_num = fields.Char(string='Delivery Order Number') \n delivery_challan_num = fields.Char(string='Delivery Challan Number')\n delivery_order_created_date = fields.Date(string='Delivery Order Created date')\n\n @api.model\n def create(self, vals):\n \"\"\"\n Overrides orm create method.\n @param self: The object pointer\n @param vals: dictionary of fields value.\n \"\"\"\n if not vals:\n vals = {}\n seq_obj = self.env['ir.sequence']\n seq_obj2 = self.env['ir.sequence']\n invoice_num = seq_obj.next_by_code('commercial_invoice_report_num') or 'New'\n only_num = seq_obj2.next_by_code('only_num') or 'New_seqq'\n vals['name'] = invoice_num\n vals['only_seq_num'] = only_num\n return super(CommercialInvoiceModel, self).create(vals)\n\n def onchange_pi_id(self, cr, uid, ids, pi_id=False, context=None):\n res= {}\n if pi_id:\n service_obj= self.pool.get('sale.order').browse(cr, uid,pi_id,context=context) \n service_obj2= self.pool.get('res.partner').browse(cr, uid,service_obj.partner_id.id,context=context)\n service_obj3= self.pool.get('res.country').browse(cr, uid,service_obj2.country_id.id,context=context)\n currency_symbol= self.pool.get('res.currency').browse(cr, uid,service_obj.currency_id.id,context=context)\n cus_name = service_obj2.name\n cus_full_address = str(service_obj2.street) + \", \" + str(service_obj2.street2) + \", \" + str(service_obj2.city)+ \" - \" + str(service_obj2.zip) + \", \" + str(service_obj3.name)\n lc_id = service_obj.lc_num_id\n lc_service_obj= self.pool.get('lc_informations.model')\n rec = lc_service_obj.browse(cr, uid, lc_id.id)\n lc_bank_name = rec.bank_name2\n lc_bank_branch = rec.bank_branch\n lc_bank_address = rec.bank_address\n vat_no = rec.vat_no\n irc_no = rec.irc_no\n bin_no = rec.bin_no\n tin_no = rec.tin_no\n amend_no = rec.amend_no\n amend_date = rec.amend_date\n 
bank_info = str(lc_bank_name) + \"\\n\" + str(lc_bank_branch) + \"\\n\" + str(lc_bank_address)\n account_invoice_ids = self.pool.get('account.invoice').search(cr, uid,[('pi_no','=',service_obj.name),('process','=','set_for_LC')],context=context)\n if not account_invoice_ids:\n # print('Account invoice list is empty.')\n raise except_orm(_('Validation!'),\n _(\"No document ready for set L/C document under PI No. %s !\")% (service_obj.name,))\n else:\n invoice_line_pool_ids = self.pool.get('account.invoice.line').search(cr, uid,[('invoice_id','=',account_invoice_ids),],context=context) \n invoice_lines_product_name = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['name'], context=context)\n invoice_lines_product_quantity = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['quantity','name'], context=context)\n invoice_lines_product_price_of_unit = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['price_unit','name'], context=context)\n invoice_lines_product_amount = self.pool.get('account.invoice.line').read(cr, uid,invoice_line_pool_ids,['price_subtotal','name'], context=context)\n num_of_bags = service_obj.bags_of_packing\n ordered_products_names = self.split_products_names(invoice_lines_product_name) \n ordered_products_number_of_bags = self.split_products_number_of_bags(invoice_lines_product_quantity,num_of_bags)\n ordered_products_quantity = self.split_products_quantity(invoice_lines_product_quantity)\n ordered_products_price_of_unit = self.split_products_price_of_unit(invoice_lines_product_price_of_unit)\n ordered_products_amount = self.split_products_amount(invoice_lines_product_amount)\n ordered_products_total_quantity = self.products_total_quantity(invoice_lines_product_quantity)\n ordered_products_total_amount = self.products_total_amount(invoice_lines_product_amount)\n ordered_products_total_amount_in_word = self.numToWords(ordered_products_total_amount)\n\n do_no_read = 
self.pool.get('account.invoice').read(cr, uid,account_invoice_ids,['do_no'], context=context)\n do_no = self.split_do_no(do_no_read)\n stock_picking_ser_ids= self.pool.get('stock.picking').search(cr, uid,[('origin','=',service_obj.name),],context=context)\n order_date_read = self.pool.get('stock.picking').read(cr, uid,stock_picking_ser_ids,['date'], context=context)\n delivery_order_date = self.split_order_date_read(order_date_read)\n\n res = {'value':{\n 'customer_name':cus_name,\n 'customer_name2':service_obj2.name, \n 'customer_full_address':cus_full_address,\n 'proforma_invoice_id':service_obj.name,\n 'proforma_invoice_created_date':service_obj.create_date,\n 'supplier_factory_name':service_obj.place_of_delivery_name2, \n 'supplier_factory_address':service_obj.place_of_delivery_addr, \n 'client_shipping_factory_address':service_obj.cus_factory_addr,\n 'destination_address': service_obj.cus_factory_addr,\n 'erc_no':service_obj.erc_no,\n 'country_of_origin':service_obj.country_of_origin2,\n 'country_of_origin2':service_obj.country_of_origin2,\n 'num_of_bags': service_obj.bags_of_packing, \n 'lc_id':lc_id.id,\n 'lc_num':rec.name,\n 'lc_num2':rec.name,\n 'lc_date':rec.created_date,\n 'issuing_bank':bank_info,\n 'lc_date2':rec.created_date,\n 'vat_code':rec.vat_no,\n 'irc_num':rec.irc_no,\n 'bin_num':rec.bin_no, \n 'tin_num':rec.tin_no,\n 'amend_no':rec.amend_no,\n 'amend_date':rec.amend_date,\n 'ordered_products_name':ordered_products_names,\n 'ordered_products_number_of_bags':ordered_products_number_of_bags,\n 'ordered_products_quantity':ordered_products_quantity,\n 'ordered_products_price_of_unit':ordered_products_price_of_unit,\n 'ordered_products_amount': ordered_products_amount,\n 'ordered_products_total_quantity': \"{:,}\".format(ordered_products_total_quantity),\n 'ordered_products_total_amount': \"{:,}\".format(ordered_products_total_amount),\n 'ordered_products_total_amount_in_word':ordered_products_total_amount_in_word,\n 
'currency_symbol_name':currency_symbol.name,\n 'currency_symbol_name1':currency_symbol.name,\n 'currency_symbol_name2':currency_symbol.name,\n 'currency_symbol':currency_symbol.symbol,\n 'currency_symbol1':currency_symbol.symbol,\n 'currency_symbol2':currency_symbol.symbol, \n 'delivery_order_num':do_no, \n 'delivery_order_created_date':delivery_order_date, \n }}\n else:\n res={} \n return res \n\n def split_order_date_read(self,order_date_read):\n seen = set()\n date= []\n answer = []\n for r in order_date_read: \n date.append(r['date'])\n combine_date = '\\n'.join([str(i) for i in date])\n for line in combine_date.splitlines():\n if line not in seen:\n seen.add(line)\n answer.append(line)\n combine = '\\n'.join(answer)\n return combine\n\n def split_do_no(self,do_no_read):\n seen = set()\n do= []\n answer = []\n for r in do_no_read: \n do.append(r['do_no'])\n combine_do = '\\n'.join([str(i) for i in do])\n for line in combine_do.splitlines():\n if line not in seen:\n seen.add(line)\n answer.append(line)\n combine = '\\n'.join(answer)\n return combine \n\n def split_products_names(self,invoice_lines_product_name):\n seen = set()\n answer = []\n names= []\n for r in invoice_lines_product_name:\n names.append(r['name'])\n combine_names = '\\n'.join([str(i) for i in names])\n for line in combine_names.splitlines():\n if line not in seen:\n seen.add(line)\n answer.append(line)\n combine = '\\n'.join(answer)\n return combine \n\n def split_products_number_of_bags(self,invoice_lines_product_quantity,num_of_bags):\n number_of_bags= []\n bags = int(num_of_bags)\n testListDict = {}\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity'] / bags)\n except:\n d=item['name']\n testListDict[d] = int(item['quantity'] / bags)\n \n for the_key, the_value in testListDict.iteritems():\n number_of_bags.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in number_of_bags])\n return combine \n\n def 
split_products_quantity(self,invoice_lines_product_quantity):\n quantity= []\n testListDict = {}\n for item in invoice_lines_product_quantity:\n try:\n d=item['name']\n testListDict[d] += int(item['quantity']) \n except:\n d=item['name']\n testListDict[d] = int(item['quantity'])\n\n for the_key, the_value in testListDict.iteritems():\n quantity.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in quantity])\n return combine \n\n def split_products_price_of_unit(self,invoice_lines_product_price_of_unit):\n price_of_unit= []\n testListDict = {}\n for item in invoice_lines_product_price_of_unit:\n try:\n d=item['name']\n testListDict[d] = item['price_unit'] \n except:\n d=item['name']\n testListDict[d] = item['price_unit']\n\n for the_key, the_value in testListDict.iteritems():\n price_of_unit.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in price_of_unit])\n return combine\n\n def split_products_amount(self,invoice_lines_product_amount):\n amount= []\n testListDict = {}\n for item in invoice_lines_product_amount:\n try:\n d=item['name']\n testListDict[d] += int(item['price_subtotal']) \n except:\n d=item['name']\n testListDict[d] = int(item['price_subtotal'])\n\n for the_key, the_value in testListDict.iteritems():\n amount.append(the_value)\n combine = '\\n \\n'.join([str(i) for i in amount])\n return combine \n\n def products_total_quantity(self,invoice_lines_product_quantity):\n total_quantity= []\n for r in invoice_lines_product_quantity: \n total_quantity.append(r['quantity'])\n in_com = sum(total_quantity)\n combine = int(in_com)\n return combine \n\n def products_total_amount(self,invoice_lines_product_amount):\n total_amount= []\n idx = 0\n for r in invoice_lines_product_amount:\n total_amount.append(r['price_subtotal'])\n combine = sum(total_amount)\n return combine\n\n def onchange_client_shipping_factory_name(self, cr, uid, ids, client_shipping_factory_name=False, context=None):\n res= {}\n if client_shipping_factory_name:\n 
service_obj= self.pool.get('customer_factory_name_address.model')\n rec = service_obj.browse(cr, uid, client_shipping_factory_name)\n res = {'value':{\n 'client_shipping_factory_address':rec.address,\n 'destination_address':rec.address\n }}\n else:\n res={} \n return res\n\n def onchange_supplier_factory_name(self, cr, uid, ids, supplier_factory_name=False, context=None):\n res= {}\n if supplier_factory_name:\n service_obj= self.pool.get('supplier_factory_name_address.model')\n rec = service_obj.browse(cr, uid, supplier_factory_name)\n res = {'value':{\n 'supplier_factory_address':rec.address\n }}\n else:\n res={} \n return res\n\n def numToWords(self,num,join=True):\n '''words = {} convert an integer number into words'''\n units = ['','one','two','three','four','five','six','seven','eight','nine']\n teens = ['','eleven','twelve','thirteen','fourteen','fifteen','sixteen', \\\n 'seventeen','eighteen','nineteen']\n tens = ['','ten','twenty','thirty','forty','fifty','sixty','seventy', \\\n 'eighty','ninety']\n thousands = ['','thousand','million','billion','trillion','quadrillion', \\\n 'quintillion','sextillion','septillion','octillion', \\\n 'nonillion','decillion','undecillion','duodecillion', \\\n 'tredecillion','quattuordecillion','sexdecillion', \\\n 'septendecillion','octodecillion','novemdecillion', \\\n 'vigintillion']\n words = []\n if num==0: words.append('zero')\n else:\n numStr = '%d'%num\n numStrLen = len(numStr)\n groups = (numStrLen+2)/3\n numStr = numStr.zfill(groups*3)\n for i in range(0,groups*3,3):\n h,t,u = int(numStr[i]),int(numStr[i+1]),int(numStr[i+2])\n g = groups-(i/3+1)\n if h>=1:\n words.append(units[h])\n words.append('hundred')\n if t>1:\n words.append(tens[t])\n if u>=1: words.append(units[u])\n elif t==1:\n if u>=1: words.append(teens[u])\n else: words.append(tens[t])\n else:\n if u>=1: words.append(units[u])\n if (g>=1) and ((h+t+u)>0): words.append(thousands[g]+',')\n if join: return ' '.join(words)\n return words\n\n # def 
split_products_names(self,invoice_lines_product_name):\n # names= []\n # idx = 0\n # for r in invoice_lines_product_name:\n # names.append(r['name'])\n # combine = '\\n'.join([str(i) for i in names]) \n # return combine \n\n # def split_from_list(self,list_name,data_field):\n # save = []\n # for r in list_name:\n # save.append(r[data_field])\n # combine = '\\n'.join([str(i) for i in save])\n # return combine \n\n\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.6860902309417725,
"alphanum_fraction": 0.6917293071746826,
"avg_line_length": 32.3125,
"blob_id": "2cd65c8a06aa485dd50eb418d44064119481d1bb",
"content_id": "d231ac7beb685267fec57535bfb156fc82a5fc4b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 532,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 16,
"path": "/models/terms_conditions.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass TermsConditions(models.Model):\n _name = 'terms_conditions.model'\n\n\n name = fields.Text(required=True, string='Other Terms and Conditions',size=100)\n date = fields.Date('Created Date', required=True, default=fields.Date.today())\n\n\n @api.one \n @api.constrains('date')\n def _check_lc_date(self):\n if self.date > fields.Date.today():\n raise ValidationError(_(\"Date can't be greater than current date!\"))"
},
{
"alpha_fraction": 0.7320754528045654,
"alphanum_fraction": 0.7320754528045654,
"avg_line_length": 32.125,
"blob_id": "fc2ccfbdfb587575ae1344ce535428d5129bd19c",
"content_id": "25704aef86b71b5a2bb43194d4171cf7632d15df",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 265,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 8,
"path": "/models/delivery_address.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields\n\nclass DeliveryAddress(models.Model):\n _name = 'delivery_address.model'\n\n\n name = fields.Text(required=True, string='Delivery From Address')\n date = fields.Date('Created date', required=True, default=fields.Date.today())\n"
},
{
"alpha_fraction": 0.6985645890235901,
"alphanum_fraction": 0.6985645890235901,
"avg_line_length": 28.85714340209961,
"blob_id": "b36442ca71e5e15bbb64e97448539ce7613cf015",
"content_id": "0a2fdcbbcddd8bff2c1b0c4efb51d70b598eab31",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 418,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 14,
"path": "/models/signature_upload.py",
"repo_name": "Arnavbd1971/lc_report_generator_odoo8_aahold",
"src_encoding": "UTF-8",
"text": "from openerp import models, fields, api, _\nfrom openerp.exceptions import ValidationError\n\nclass Signature(models.Model):\n _name = 'signature_upload.model'\n\n\n name = fields.Char(required=True, string='Name')\n \n my_binary_field_name = fields.Binary(\n 'Signarute',help=\"Select signature image here\"\n )\n \n created_date = fields.Date('Created Dated', required=True, default=fields.Date.today())\n"
}
] | 33 |
LucasBR96/MST-ANIMATION | https://github.com/LucasBR96/MST-ANIMATION | 2d52adedb1a2e8d74db0bace5fc2d341435b47fd | 67183b454bc319c1618b15ba8d1b4a7d95b9fff2 | 9af2cede4fd7dae8d428988e4b971adacdbc35f8 | refs/heads/main | 2023-04-10T21:30:56.640105 | 2021-04-23T23:15:45 | 2021-04-23T23:15:45 | 359,779,749 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5923240780830383,
"alphanum_fraction": 0.59957355260849,
"avg_line_length": 27.94444465637207,
"blob_id": "1fd4d8c9cf6b4182e58dfe915401163a248b13cd",
"content_id": "5e561fafed658d83c3f27399b93d72496819b01d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4690,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 162,
"path": "/README.md",
"repo_name": "LucasBR96/MST-ANIMATION",
"src_encoding": "UTF-8",
"text": "# An animation for the resolution of the Minimum Spanning Tree problem\n\n## About\n**UFF - UNIVERSIDADE FEDERAL FLUMINENSE**\n**AUTHORS:** Lucas Fuzato Cipriano, Mariana Suarez de Oliveira\n**CLASS:** Algorithm Design and Analysis.\n**PROFESSOR:** Mr. Celso da Cruz de Oliveira\n\n## Overview\n\nThe goal of this project is to develop a desktop application that show\nthe step by step resolution of the minnimum spanning tree problem through\na simple graphic user interface. The animation should be clear enought for\nthe understanding of the execution and the user must be able to customize the\ninput through the command line interface\n\n## The MST problem and its solutions\n\nSupose the existance of a graph **G( V , E )** that is undirected and have weighted\n( positive ) edges. Find for G a tree **T( Va , Ea )** that:\n - have all vertices of G ( Spanning )\n - the sum of the weights of its edges is the smallest possible ( Minimum )\n\nThe problem is solveable by the use of greedy algorithms for optimal value, and its generic solution is \nthe one described below.\n\n### Generic Solution\n\n```\nGEN-MST( V , E )\n\n Va = {}\n Ea = {}\n\n while V - Va != {}\n\n safe_edges = { e | e in E , SAFE( e , Va ) }\n\n e = ARG-MIN-WEIGHT( safe_edges , E )\n Ea = Ea + { e }\n\n y = { y' | y' in e , y not in Va }\n Va = Va + y\n \n return Va , Ea\n\n```\n\nin this context, an edgde is said **safe** to a given set **Va** when at least one of her vertices\nis not in **Va**. The greedy choice here is to choose the safe edge to Va with the smallest weight\n, add it to Ea and uptdate **Va** with its new( s ) vertice( s ). When **Va == V**, we have no more \nsafe edges, therefore we quit.\n\nAny solution that fits the description of the genereric one is solves the MST for optimal value. 
In this project,\ntwo are of most interest: Kruskall and Prim.\n\n### Kruskal Solution\n\nThe algorithm, introduced in 1956, by joseph kruskall, have the following pseudocode\n\n```\nKRUSKAL-MST( V , E )\n\n Va = {}\n Ea = {}\n\n E' = SORT-WEIGHT( E ) //1\n T = { { x } | x in V } //2\n\n for e in E'\n ( a , b ) = e\n\n T1 = TREE( a , T ) // 3\n T2 = TREE( b , T )\n if T1 == T2\n continue\n\n Ea = Ea + { e }\n y = { y' | y' in e , y not in Va }\n Va = Va + y\n\n //4\n T = T - T1\n T = T - T2\n T = T + MERGE( T1 , T2 )\n return Va , Ea\n```\n\n1 - **E'** is a ordenation of de edges in **E**, ascending on weight. By iterating on **E'** every safe edge found\nmust be an edge of **Ea**. Thus **E'** must be iterated only once.\n\n2 - **T** is the set of all trees present at each iteration. It is initialized with everey tree with one node of V.\n\n3 - Find the Tree in **T** where node **a** is located. **T1** must be different from **T2**, if they aren't it means\nthat the current edge is not safe, so adding it will create a cycle.\n\n4 - If the current edge have node in each tree, it means that it unites two distinc trees. 
So adding it to **Ea** \nimplies that **T1** and **T2** must be replaced by the union of both.\n\n### Prim Solution\n\n```\nPRIM-MST( V , E )\n\n // 1\n x = RANDOM( V )\n Va = { x }\n Ea = {}\n\n // 2\n P = CONNECTING-EDGES( x , E )\n\n // 3\n Q = COPY( V )\n while Q != {}\n\n // 4\n e = ARG-MIN-WEIGHT( P )\n P = P - { e }\n \n ( a , b ) = e\n if ( a in Va ) and ( b in Va )\n continue\n \n Ea = Ea + { e }\n y = { y' | y' in e , y not in Va }\n Va = Va + { y }\n\n // 5\n Q = Q - { y }\n P' = CONNECTING-EDGES( y , E )\n P = P + P'\n\n return Va , Ea\n```\n\n1 - Unlike kruskal, that gives a every edge a tree, prim initializes a single tree with one edge, and builds \nthe solution from there.\n\n2 - **CONNECTING-EDGES( x , E )** is a function whicht given a node **x** and a set of edges **E**, it returns\na subset of **E** where every edge have one of its nodes equal to **x**. Thus the value **P** represent the pool\nof possible new edges of desired tree.\n\n3 - **Q** is the set of nodes of the graph that are not yet visited by the tree. When the tree spans all nodes, Q\nwill be empty.\n\n4 - **ARG-MIN-WEIGHT( P )** returns the edge with the smallest weight. if **e** is such edge, it will be verified\nfor safety. If safe, it will be added to the set of edges of the tree, if not, it will be discarded. Anyway, it doesn't\nneed to go back to **P**\n\n5 - **y** is the new node of the tree, if **e** is safe. **P** must now include all the edges that connect to **y**.\nSince it is now part of the desired tree, the node must be removed from **Q**.\n\n### Proof of correctness\n\n## Command Line manual\n\n## GUI design\n\n## How to install\n\n## References\n\n"
},
{
"alpha_fraction": 0.542980968952179,
"alphanum_fraction": 0.5559836030006409,
"avg_line_length": 24.80124282836914,
"blob_id": "489deb6c5fe4726810e33a1a828872dd7b512be7",
"content_id": "6a835e1967317381bfcf00e0199ddfbd2b3a3060",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4153,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 161,
"path": "/monitor_with_animation_test.py",
"repo_name": "LucasBR96/MST-ANIMATION",
"src_encoding": "UTF-8",
"text": "import kruskal_monitor as krus\nimport prim_monitor as prim\n\nimport networkx as nx\nfrom matplotlib import animation, rc\nimport matplotlib.pyplot as plt\n\n\n\nmodo = 1\nKRUSKAL = 0\nPRIM = 1\nG = nx.Graph()\nE = dict()\nV = set()\nadvance = True\nclicked = False\nfig, ax = plt.subplots(figsize=(10,8))\n\n# Writer = animation.writers['ffmpeg']\n# writer = Writer(fps=15, metadata=dict(artist='Me'), bitrate=1800)\n\ndef solution_generator():\n global advance, modo\n\n monitor = krus\n if algo == PRIM:\n monitor = prim\n \n monitor._init( V , E )\n while True:\n if(advance):\n if(modo == 1):\n advance = False\n seq = [ monitor._next() for i in range( 3 ) ]\n if(seq.pop()):\n yield pretty_vars(monitor.get_variables(), False)\n else:\n break\n else: \n yield pretty_vars(monitor.get_variables(), False)\n yield pretty_vars(monitor.get_variables(), True)\n\ndef pretty_vars( mst_vars , end):\n\n global edge_status , current_edge , Va , Ea\n edge_status , current_edge , Va , Ea = mst_vars\n s = ''\n s += \"edge_status = {}\".format( edge_status ) + \"\\n\"\n s += \"current_edge = {} {}\".format( *current_edge ) + \"\\n\"\n s += \"nodes in tree: \" + \"\\n\"\n s += \"\\t\" + ' '.join( Va ) + \"\\n\"\n s += \"edges in tree:\" + \"\\n\"\n for a , b in Ea:\n s += \"\\t\" + \"{} {}\".format( a , b ) +\"\\n\"\n if end:\n current_edge = None\n return s\n\ndef do_nothing():\n # FuncAnimation requires an initialization function. 
We don't\n # do any initialization, so we provide a no-op function.\n pass\n\n#FIXME - reduce only to drawing\ndef update(mst_edges):\n current_edges = set()\n current_edges.add(current_edge)\n ax.clear()\n\n all_edges = set(tuple(sorted((n1, n2))) for n1, n2 in G.edges())\n node_labels = {}\n\n for idx, node in enumerate(G.nodes()): \n node_labels[node] = node\n\n nx.draw_networkx_edges(\n G, pos, edgelist=all_edges-Ea - current_edges, alpha=0.1,\n edge_color='g', width=1, ax=ax\n )\n\n labels = nx.get_edge_attributes(G,'weight')\n nx.draw_networkx_edges(\n G, pos, edgelist=Ea - current_edges , alpha=1.0,\n edge_color='green', width=1, ax=ax\n )\n if(current_edge != None):\n nx.draw_networkx_edges(\n G, pos, edgelist=current_edges , alpha=1.0,\n edge_color='r', width=1, ax=ax\n )\n nx.draw_networkx_nodes(G, pos, nodelist=G.nodes()-Va, node_color='gray', alpha=0.5, node_size=300, ax=ax)\n nx.draw_networkx_nodes(G, pos, nodelist=Va, node_color='b', alpha=0.5, node_size=300, ax=ax)\n nx.draw_networkx_edge_labels(G,pos,edge_labels=labels, alpha=0.5, ax=ax)\n nx.draw_networkx_labels(G, pos, node_labels, alpha=1, ax=ax)\n\n\ndef on_press(event):\n global advance, modo\n if (modo == 1):\n advance = not advance\n\nfig.canvas.mpl_connect('key_press_event', on_press)\n\ndef main():\n global pos, algo, ani, modo\n print( \"escolha o algoritimo:\" )\n print( \"0 - kruskal\" )\n print( \"1 - prim\" )\n algo = int( input() )\n\n print( \"selecione o modo:\" )\n print( \"0 - direto\" )\n print( \"1 - controlado\" )\n modo = int( input() )\n\n print()\n print( \"digite os vertices do grafo\" )\n print( 'formato: m m i' )\n print( \"m -> minuscula\")\n print( \"i -> inteiro\" )\n print( \"digite -1 se acabou\")\n\n while True:\n tup = input().rstrip()\n if tup == \"-1\":\n break\n a , b , m = tup.split()\n E[ ( a , b ) ] = int( m )\n V.add( a )\n V.add( b )\n\n \n G.add_nodes_from(V)\n\n for key in E.keys():\n print(key[0], key[1], E[key])\n G.add_edge(key[0], key[1], weight = 
E[key])\n\n pos = nx.random_layout(G)\n\n node_labels = {}\n\n for idx, node in enumerate(G.nodes()): \n node_labels[node] = node\n\n\n #ani = Player(fig, krus, ax, G, V, E, pos, nx.get_edge_attributes(G,'weight') ,node_labels)\n ani = animation.FuncAnimation(\n fig,\n update,\n init_func=do_nothing,\n frames=solution_generator,\n interval=500,\n repeat = False\n )\n plt.show()\n\n\n\nmain()"
},
{
"alpha_fraction": 0.47340425848960876,
"alphanum_fraction": 0.478723406791687,
"avg_line_length": 23.310344696044922,
"blob_id": "eacf8ba7ee6f1f215add491a4628bac63a0c9849",
"content_id": "c01cedf58939395829d05c8e7a8e32801a81585c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2820,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 116,
"path": "/CLI_input.py",
"repo_name": "LucasBR96/MST-ANIMATION",
"src_encoding": "UTF-8",
"text": "import string\nimport sys\n\n# GLOBAL VARS --------------------------------------------------\nEXIT_CHAR = \"*\"\nEND_CHAR = \"-1\"\nMAX_NODES = 100\n\nCHAR_TERMS = {\n \"a\":\"algo\" , \n \"r\":\"exec\" , \n \"b\":\"build\" \n}\n\nVALID_CHOICES = {\n \"algo\" :[ \"PRIM\", \"KRUSKAL\" ],\n \"exec\" :[ \"DIRECT\", \"ITER\" ],\n \"build\":[ \"CUSTOM\" , \"RANDOM\"],\n}\n\ninput_info = dict()\n\n# FUNCTIONS ---------------------------------------------------\n\ndef random_build():\n\n # setting number of nodes\n print(\"digite a quantidade de nos\")\n print(\"maximo -> {}\".format( MAX_NODES ) )\n\n while True:\n\n c = input()\n if c == EXIT_CHAR: raise InterruptedError\n if all( x in string.digits for x in c ):\n m = int( c )\n if m <= MAX_NODES:\n input_info[ \"num_nodes\" ] = m\n break\n print( \"entrada invalida, digite novamente\")\n print()\n \n min_edges = m - 1 # A tree, Basicaly\n max_edges = m**2 - m # Fully connected\n print(\"digite a quantidade de arestas\")\n print(\"maximo -> {}\".format( max_edges ) ) \n print(\"minimo -> {}\".format( min_edges ) )\n while True:\n\n c = input()\n if c == EXIT_CHAR: raise InterruptedError\n if all( x in string.digits for x in c ):\n m = int( c )\n if min_edges <= m <= max_edges:\n input_info[ \"num_edges\" ] = m\n break\n print( \"entrada invalida, digite novamente\") \n print()\n\ndef custom_build( ):\n\n print( \"digite os vertices do grafo\" )\n print( 'formato: m m i' )\n print( \"m -> minuscula\")\n print( \"i -> inteiro\" )\n print( \"digite -1 se acabou\")\n\n input_info[ \"nodes\" ] = set()\n input_info[ \"edges\" ] = dict()\n while True:\n tup = input().rstrip()\n if tup == END_CHAR:\n break\n elif tup == EXIT_CHAR: raise InterruptedError\n\n s = tup.split()\n if len( s ) != 3 or s[-1] not in string.digits:\n print( \"entrada invalida, digite novamente\") \n \n a , b , c = s\n input_info[ \"edges\" ][ ( a , b ) ] = int( c )\n input_info[ \"nodes\" ].add( a )\n input_info[ \"nodes\" ].add( b )\n 
print()\n\ndef char_choice( ch , nome ):\n\n if ch not in CHAR_TERMS:\n raise ValueError\n term = CHAR_TERMS[ ch ]\n\n nom = nome.upper()\n if nom not in VALID_CHOICES[ term ]:\n raise ValueError\n \n input_info[ term ] = nom\n \ndef main( args ):\n\n i = 0\n while i < len( args ):\n m = args[i]\n if m[ 0 ] == '-':\n char_choice( m[1] , args[ i + 1 ] )\n i += 2\n\n build_fun = custom_build\n if input_info[ \"build\" ] == \"RANDOM\":\n build_fun = random_build\n build_fun()\n \n print( *input_info.items() , sep = \"\\n\")\n\n\nif __name__ == \"__main__\":\n main( sys.argv[ 1: ] )\n"
},
{
"alpha_fraction": 0.47040972113609314,
"alphanum_fraction": 0.474962055683136,
"avg_line_length": 21.355932235717773,
"blob_id": "5702b2a9c3dbe050feb42ad71ef0ffcf24c1f67c",
"content_id": "3f7f02b08c32c5a4e80afea3f6b147d444bdd6f3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1318,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 59,
"path": "/monitor_test.py",
"repo_name": "LucasBR96/MST-ANIMATION",
"src_encoding": "UTF-8",
"text": "import kruskal_monitor as krus\nimport prim_monitor as prim\n\nKRUSKAL = 0\nPRIM = 1\n\ndef solution_generator( V , E , algo ):\n\n monitor = krus\n if algo == PRIM:\n monitor = prim\n \n monitor._init( V , E )\n while monitor._next():\n yield monitor.get_variables()\n\ndef pretty_vars( mst_vars ):\n\n edge_status , current_edge , Va , Ea = mst_vars\n s = ''\n s += \"edge_status = {}\".format( edge_status ) + \"\\n\"\n s += \"current_edge = {} {}\".format( *current_edge ) + \"\\n\"\n s += \"nodes in tree: \" + \"\\n\"\n s += \"\\t\" + ' '.join( Va ) + \"\\n\"\n s += \"edges in tree:\" + \"\\n\"\n for a , b in Ea:\n s += \"\\t\" + \"{} {}\".format( a , b ) +\"\\n\"\n return s\n\ndef main():\n\n print( \"escolha o algoritimo:\" )\n print( \"0 - kruskal\" )\n print( \"1 - prim\" )\n n = int( input() )\n\n print()\n print( \"digite os vertices do grafo\" )\n print( 'formato: m m i' )\n print( \"m -> minuscula\")\n print( \"i -> inteiro\" )\n print( \"digite -1 se acabou\")\n\n E = dict()\n V = set()\n while True:\n tup = input().rstrip()\n if tup == \"-1\":\n break\n a , b , m = tup.split()\n E[ ( a , b ) ] = int( m )\n V.add( a )\n V.add( b )\n\n for x in solution_generator( V , E , n ):\n input()\n print( pretty_vars( x ) )\n\nmain()"
},
{
"alpha_fraction": 0.44062501192092896,
"alphanum_fraction": 0.45223215222358704,
"avg_line_length": 19.9158878326416,
"blob_id": "77efa2c97f556339459e079dbdd7bbbbe9ea8bf5",
"content_id": "611d95e5c25f85ce5e7914949bc9f28f281c97a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2240,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 107,
"path": "/kruskal_monitor.py",
"repo_name": "LucasBR96/MST-ANIMATION",
"src_encoding": "UTF-8",
"text": "#GLOBAL VARIABLE VISIBLE BY FILE ONLY ------------------------------------------------\nEND = -1\nSELECT = 0\nCONSIDER = 1\nUPDATE = 2\nglobal_status = SELECT\n\nE_prime = []\nT = dict()\n\n#GLOBAL VARIABLE VISIBLE BY OUTSIDERS------------------------------------------------\nCONSIDERED = 0\nREJECTED = 1\nACCEPTED = 2\nedge_status = CONSIDERED\ncurrent_edge = ( -1 , -1 )\n\nVa = set()\nEa = set()\n\nN = 0\npos = 0 \n\n#MONITOR FUNCTIONS------------------------------------------------------------------\ndef _select_fun():\n\n global current_edge, edge_status, global_status\n current_edge = E_prime[ pos ]\n edge_status = CONSIDERED\n global_status = CONSIDER\n\ndef _consider_fun( ):\n\n global edge_status, global_status\n ( x , y ) = current_edge\n r1 = T[ x ]\n r2 = T[ y ]\n edge_status = REJECTED \n if r1 != r2: \n edge_status = ACCEPTED\n global_status = UPDATE\n\ndef _update_fun( ):\n\n global Va, Ea, current_edge, edge_status, T, global_status\n if edge_status == ACCEPTED:\n ( x , y ) = current_edge\n Va.add( x )\n Va.add( y )\n Ea.add( ( x , y ) )\n n = T[ x ]\n for a in T:\n if T[ a ] == n: T[ a ] = T[ y ]\n \n global pos , N\n pos = pos + 1\n global_status = SELECT if pos < N else END\n\ndef _init( V , E ):\n\n global E_prime , N\n E_prime = [ tup for tup in E ]\n E_prime.sort( key = lambda x : E[ x ] )\n N = len( E_prime )\n\n global T\n T = { v:i for i , v in enumerate( V ) } \n\ndef _next( ):\n\n if global_status == END:\n return False\n\n if global_status == SELECT:\n _select_fun()\n elif global_status == CONSIDER:\n _consider_fun()\n elif global_status == UPDATE:\n _update_fun()\n return True\n\ndef get_variables():\n\n return( edge_status , current_edge , Va.copy() , Ea.copy() )\n\nif __name__ == \"__main__\":\n\n V = set( [\"a\" , \"b\" , \"c\" , \"d\", \"e\" ] )\n E = {\n ('a','b'):2,\n ('a','c'):3,\n ('a','d'):4,\n ('c','d'):1,\n ('b','d'):2,\n ('d','e'):7,\n ('c','e'):3,\n ('a','e'):2\n }\n\n _init( V , E )\n while _next():\n input()\n t = 
get_variables()\n print( \"-\"*25 )\n print( *t , sep = \"\\n\" )\n\n pass\n\n\n"
},
{
"alpha_fraction": 0.43642815947532654,
"alphanum_fraction": 0.4512122869491577,
"avg_line_length": 22.46527862548828,
"blob_id": "8155a00cda8dc2b087f1ddae5c770ece5ab58300",
"content_id": "801ac1eeeaac134142c2bcff60263d4281043007",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3382,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 144,
"path": "/prim_monitor.py",
"repo_name": "LucasBR96/MST-ANIMATION",
"src_encoding": "UTF-8",
"text": "from collections import deque\n\n#GLOBAL VARIABLE VISIBLE BY FILE ONLY ------------------------------------------------\nEND = -1\nSELECT = 0\nCONSIDER = 1\nUPDATE = 2\nglobal_status = SELECT\nPossible_neighbors = deque([])\nAdj_lst = dict()\nE_val = dict()\n\n#GLOBAL VARIABLE VISIBLE BY OUTSIDERS------------------------------------------------\nCONSIDERED = 0\nREJECTED = 1\nACCEPTED = 2\nedge_status = CONSIDERED\ncurrent_edge = ( -1 , -1 )\nVa = set()\nEa = set()\n\n#AUXILIARY FUNCTIONS ---------------------------------------------------------------\ndef intercal( arr1 , arr2 , foo ):\n\n i , j = 0 , 0\n seq = []\n\n while i < len( arr1 ) or j < len( arr2 ):\n\n if i >= len( arr1 ):\n seq.append( arr2[ j ] )\n j += 1\n elif j >= len( arr2 ):\n seq.append( arr1[ i ] )\n i += 1\n elif foo( arr1[ i ] ) < foo( arr2[ j ] ):\n seq.append( arr1[ i ] )\n i +=1\n else:\n seq.append( arr2[ j ] )\n j += 1\n \n return seq\n\n#MONITOR FUNCTIONS------------------------------------------------------------------\n\ndef _select_fun():\n\n global Possible_neighbors , current_edge, edge_status , global_status\n current_edge = Possible_neighbors.popleft()\n edge_status = CONSIDERED\n global_status = CONSIDER \n\ndef _consider_fun():\n\n global edge_status, global_status\n a , b = current_edge\n r1 = a in Va\n r2 = b in Va\n edge_status = ACCEPTED if r1^r2 else REJECTED\n global_status = UPDATE\n\ndef _update_fun():\n\n global global_status\n if edge_status == ACCEPTED:\n \n global Ea, Va\n Ea.add( current_edge )\n ( a , b ) = current_edge\n y = a if b in Va else b\n Va.add( y )\n\n global Adj_lst, E_val, Possible_neighbors\n new_edges = [ tup for tup in Adj_lst[ y ] if tup != ( a , b ) ]\n Possible_neighbors = deque( intercal( Possible_neighbors , new_edges , lambda x: E_val[ x ] ) )\n \n global_status = END if len( Possible_neighbors ) == 0 else SELECT\n\ndef _init( V , E ):\n\n global E_val\n E_val = E\n\n E_set = list( tup for tup in E )\n E_set.sort( key = lambda x: E[ x 
] )\n\n global Adj_lst\n for edge in E_set:\n ( a , b ) = edge\n Adj_lst[ a ] = Adj_lst.get( a , [] ) + [ edge ]\n Adj_lst[ b ] = Adj_lst.get( b , [] ) + [ edge ]\n \n global Possible_neighbors , Va, Ea\n Possible_neighbors.extend( Adj_lst[ a ] )\n Va.add( a )\n\ndef get_variables():\n\n return( edge_status , current_edge , Va.copy() , Ea.copy() )\n\ndef _next():\n\n if global_status == END:\n return False\n\n if global_status == SELECT:\n _select_fun()\n elif global_status == CONSIDER:\n _consider_fun()\n elif global_status == UPDATE:\n _update_fun()\n return True\n\nif __name__ == \"__main__\":\n\n # E = dict()\n E = {\n ('a','b') :1 ,\n ('a', 'd'): 2,\n ('a', 'i'): 7,\n ('b', 'c'): 3,\n ('b', 'd'): 5,\n ('c', 'd'): 3,\n ('c', 'e'): 2,\n ('d', 'i'): 1,\n ('d', 'e'): 2,\n ('e', 'f'): 3,\n ('e', 'g'): 4,\n ('e', 'h'): 2,\n ('f', 'g'): 8,\n ('g', 'h'): 2,\n ('h', 'i'): 10\n }\n V = set()\n _init( V , E )\n while _next():\n # input()\n print()\n t = get_variables()\n print( \"-\"*25 )\n print( *t , sep = \"\\n\" )\n\n pass\n\n\n\n"
}
] | 6 |
simonrobb/touch-logger | https://github.com/simonrobb/touch-logger | 6dc840271eb99e018b89566b21f03a35bdb9af2d | 72154a75e2628fc4d8cddea3babeaa96da0a87c5 | 2a4f6b5540a1cb515fbbfae182e5cec512a2dd11 | refs/heads/master | 2022-06-27T20:58:19.418167 | 2017-09-08T13:49:33 | 2017-09-08T13:49:33 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6564551591873169,
"alphanum_fraction": 0.7330415844917297,
"avg_line_length": 27.625,
"blob_id": "0a6dd30b34dadb2499fd7e7d2d2b2e17c5d7fd71",
"content_id": "738dbb425c397a8f36aa16e5bc57cf3905072fbd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 457,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 16,
"path": "/src/main.py",
"repo_name": "simonrobb/touch-logger",
"src_encoding": "UTF-8",
"text": "from lib import Logger\nfrom app.Handlers import Touch, SDI\n\n# Settings\ni2c_addr_10 = 0x21\ni2c_addr_20 = 0x22\ni2c_addr_30 = 0x23\ninterval = 10\n\n# Initialise and start the logger\nlogger = Logger.Logger(interval)\n# logger.add_handler('touch', Touch.Handler(10, i2c_addr_10))\n# logger.add_handler('touch', Touch.Handler(20, i2c_addr_20))\n# logger.add_handler('touch', Touch.Handler(30, i2c_addr_30))\nlogger.add_handler('touch_sdi', SDI.Handler())\nlogger.start()"
},
{
"alpha_fraction": 0.6353383660316467,
"alphanum_fraction": 0.6428571343421936,
"avg_line_length": 20.31999969482422,
"blob_id": "0ee3fc00c020cd0d7a15c66a626ca8d1321e6a94",
"content_id": "2bc61a1c8c7599cb1012c0998990e0d49a67b7ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 532,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 25,
"path": "/src/lib/Logger.py",
"repo_name": "simonrobb/touch-logger",
"src_encoding": "UTF-8",
"text": "import time\nfrom lib import InfluxAdapter\n\nclass Logger:\n interval = 60\n active = False\n handlers = {}\n\n def __init__(self, interval = 60):\n self.interval = interval\n\n def add_handler(self, name, handler):\n self.handlers[name] = handler\n\n def start(self):\n self.active = True\n while self.active:\n for name, handler in self.handlers.iteritems():\n fields = handler()\n if fields:\n InfluxAdapter.write(name, fields)\n time.sleep(self.interval)\n\n def stop(self):\n self.active = False"
},
{
"alpha_fraction": 0.6280992031097412,
"alphanum_fraction": 0.6314049363136292,
"avg_line_length": 22.30769157409668,
"blob_id": "2066776b5b1917ae6498743d7ca96a49be9c843f",
"content_id": "dda7df33cde38477ec8aa4d4533c621e02a9f347",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 605,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 26,
"path": "/src/app/Handlers/Touch.py",
"repo_name": "simonrobb/touch-logger",
"src_encoding": "UTF-8",
"text": "import datetime\nfrom lib.I2CMoistureSensor import Sensor\n\ndef Handler(depth, address):\n class context:\n chirp = None\n\n def handler():\n if (context.chirp is None):\n context.chirp = Sensor(1, address)\n\n try:\n capacitance = context.chirp.moist()\n temperature = context.chirp.temp()\n print \"%s\\t%d\\t\\t%d\" % (str(datetime.datetime.now()), capacitance, temperature)\n\n fields = {\n (\"capacitance_\" + str(depth)): capacitance,\n (\"temperature_\" + str(depth)): temperature\n }\n return fields\n\n except IOError:\n context.chirp = None\n\n return handler"
},
{
"alpha_fraction": 0.739393949508667,
"alphanum_fraction": 0.760606050491333,
"avg_line_length": 46.21428680419922,
"blob_id": "0e19fe66e1f2a9c52a4921355b535eda95cb901d",
"content_id": "c1b23884ef32169f8eaff738912b0fc840f7a940",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 663,
"license_type": "no_license",
"max_line_length": 194,
"num_lines": 14,
"path": "/README.md",
"repo_name": "simonrobb/touch-logger",
"src_encoding": "UTF-8",
"text": "## Requirements\n - A Raspberry Pi 3 is strongly recommended if the Pi will be displaying the gathered data in Chronograf. Alternatively if the Pi is used as a headless web server, an older version may be used.\n - An I2C moisture sensor on address 0x20\n - A pot plant 🌻\n\n## Provision\nRun `sudo sh script/provision.sh` on a fresh installation of Jessie.\n\nEnable the I2C bus using `sudo raspi-config` and restart the pi.\n\n## Start the application\nRun `sudo sh script/start.sh` to spawn the python process. Go to `http://localhost:8888` to view the gathered data in Chronograf.\n\nAlternatively view Chronograf on another machine using `http://<pi_address>:8888`."
},
{
"alpha_fraction": 0.5397553443908691,
"alphanum_fraction": 0.5711008906364441,
"avg_line_length": 24.173076629638672,
"blob_id": "aaec769c3699bf7b89437cbe7787bf4aba7b0be3",
"content_id": "3330330d2c9b60cbbb62a0616db182de180a97fb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1308,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 52,
"path": "/src/app/Handlers/SDI.py",
"repo_name": "simonrobb/touch-logger",
"src_encoding": "UTF-8",
"text": "import time\nimport serial\n\ndef Handler():\n class context:\n sensor = None\n\n def sendCommand(command):\n sdi.write(command)\n response = ''\n lastEvent = time.time()\n while ((time.time() - lastEvent)<0.023):\n c = sdi.read()\n if (c):\n response += c\n lastEvent = time.time()\n return response\n\n def handler():\n if (context.sensor is None):\n context.sensor = serial.Serial(\"/dev/serial0\", baudrate=9600, timeout=1.0)\n time.sleep(5)\n\n try:\n # Take reading\n sendCommand('0M!')\n\n # Get results of reading\n # context.sensor.write('0D0!')\n # moistures = result[2:29].split('+')\n\n # Get results of reading\n # context.sensor.write('0D0!')\n # result = context.sensor.read(32)\n # temperatures = result[2:29].split('+')\n\n # fields = {\n # \"moisture_1\": float(moistures[0]),\n # \"moisture_2\": float(moistures[1]),\n # \"moisture_3\": float(moistures[2]),\n # \"moisture_4\": float(moistures[3]),\n # \"temperature_1\": float(temperatures[0]),\n # \"temperature_2\": float(temperatures[1]),\n # \"temperature_3\": float(temperatures[2]),\n # \"temperature_4\": float(temperatures[3])\n # }\n # return fields\n\n except IOError:\n context.sensor = None\n\n return handler"
},
{
"alpha_fraction": 0.6926407217979431,
"alphanum_fraction": 0.6984127163887024,
"avg_line_length": 22.133333206176758,
"blob_id": "ca9607ce14e37eb4dd68a475a2db0e69f0810823",
"content_id": "4c99f6ea72d771bdafc49ce4884dd8f80b361fcb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 693,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 30,
"path": "/src/lib/InfluxAdapter.py",
"repo_name": "simonrobb/touch-logger",
"src_encoding": "UTF-8",
"text": "from influxdb import InfluxDBClient\nimport time\n\n# Don't consume directly, use the get_adapter method\ninflux = None\n\n# Settings\ninflux_host = \"localhost\"\ninflux_port = 8086\ninflux_user = \"admin\"\ninflux_password = \"admin\"\ninflux_dbname = \"Logger\"\n\n# Get or create the InfluxDB object\ndef get_adapter():\n global influx\n if (influx is None):\n influx = InfluxDBClient(influx_host, influx_port, influx_user, influx_password, influx_dbname)\n return influx\n\n# Write to influx\ndef write(measurement, fields, tags = {}):\n iso = time.ctime()\n json_body = [{\n \"measurement\": measurement,\n \"tags\": tags,\n \"time\": iso,\n \"fields\": fields\n }]\n get_adapter().write_points(json_body)"
},
{
"alpha_fraction": 0.6804798245429993,
"alphanum_fraction": 0.6968374848365784,
"avg_line_length": 23.810810089111328,
"blob_id": "a3aa5dac9f7d041b6e5ad38ed50cfc0061648b81",
"content_id": "fe19cc8344f7a0782f3233230f8acb35faaadf9d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 917,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 37,
"path": "/script/provision.sh",
"repo_name": "simonrobb/touch-logger",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\nblock_comment()\n{\n COMMENT=$1\n printf \"\\n\\n\"\n printf \"###########################\\n\"\n printf \"#\\n\"\n printf \"# $COMMENT\\n\"\n printf \"#\\n\"\n printf \"###########################\\n\"\n printf \"\\n\"\n}\n\nblock_comment \"Update pi packages\"\nsudo apt-get -y update\nsudo apt-get -y upgrade\n\n# Add the influx repos\nblock_comment \"Install influxdb\"\nwget https://dl.influxdata.com/influxdb/releases/influxdb_1.2.4_armhf.deb\nsudo dpkg -i influxdb_1.2.4_armhf.deb\n\n# Install chronograf\nblock_comment \"Install chronograf\"\nwget https://dl.influxdata.com/chronograf/releases/chronograf_1.3.3.4_armhf.deb\nsudo dpkg -i chronograf_1.3.3.4_armhf.deb\n\n# Install pip and dependencies\nblock_comment \"Install pip and python dependencies\"\nsudo apt-get install -y python-pip\nsudo pip install influxdb\nsudo pip install --upgrade influxdb\n\n# Seed influx\nblock_comment \"Seeding Influx database\"\ninflux -execute \"CREATE DATABASE Logger\""
},
{
"alpha_fraction": 0.738095223903656,
"alphanum_fraction": 0.738095223903656,
"avg_line_length": 20.25,
"blob_id": "6a7e8999206882aa207b23b31210eea5297f4df5",
"content_id": "8495ac6a9626f79c8f6806ab71ba2a56ff6af8ad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 84,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 4,
"path": "/script/start.sh",
"repo_name": "simonrobb/touch-logger",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\nprintf \"Spawning logger process\\n\"\nnohup python src/main.py >Logger.out &"
}
] | 8 |
raywan/my-blood-is-coffee | https://github.com/raywan/my-blood-is-coffee | 5d1b2c7fc61183e1edbbb05f656d472f037e71cd | 36cc9ad4b04a8ab708e9f803a2f862e6255e3dd8 | 2ee681e402d5346fa4d5bd62edcd8e63e6ecff51 | refs/heads/master | 2020-03-23T06:14:35.301781 | 2018-07-17T19:41:47 | 2018-07-17T19:41:47 | 141,198,105 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6915520429611206,
"alphanum_fraction": 0.6915520429611206,
"avg_line_length": 21.086956024169922,
"blob_id": "3b4dac376f588d13779a658dd103c688227a776a",
"content_id": "7d60aa169645b472092c94e2b581cd7706e1b615",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 509,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 23,
"path": "/main.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "bump = require 'lib.bump'\nGameState = require 'lib.hump.gamestate'\n\nlocal main_menu = require 'main_menu'\nlocal game = require 'game'\nlocal pause = require 'pause'\n\nfunction love.load()\n GameState.registerEvents()\n GameState.switch(game)\nend\n\nfunction love.keypressed(key)\n if key == \"escape\" then\n love.event.push(\"quit\")\n end\n\n if GameState.current() ~= main_menu and GameState.current() ~= pause and key == 'p' then\n GameState.push(pause)\n elseif key == 'p' then\n GameState.pop()\n end\nend\n\n"
},
{
"alpha_fraction": 0.7362637519836426,
"alphanum_fraction": 0.7362637519836426,
"avg_line_length": 9.11111068725586,
"blob_id": "f0cd09cc8dcecfa34171f85e4e8cf0476e8ae8df",
"content_id": "8c5fd029ec665ced4f038b4471f83d8e138beee9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 91,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 9,
"path": "/maze.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "local maze = {}\n\nfunction maze.generate(size)\nend\n\nfunction maze.expand()\nend\n\nreturn maze\n"
},
{
"alpha_fraction": 0.6494845151901245,
"alphanum_fraction": 0.6907216310501099,
"avg_line_length": 12.857142448425293,
"blob_id": "d7d7e43a3fe968527cc0c31088d95d01cf40e26e",
"content_id": "c4813a716987636aa0ed6fc6de204300fe1bd2ab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 97,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 7,
"path": "/pause.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "local pause = {}\n\nfunction pause:draw()\n love.graphics.print(\"PAUSE\", 10, 10)\nend\n\nreturn pause\n"
},
{
"alpha_fraction": 0.6181649565696716,
"alphanum_fraction": 0.6475131511688232,
"avg_line_length": 25.752065658569336,
"blob_id": "3cb9843a0b7fcecea6daffbee3727bf4ecf612c0",
"content_id": "a561fba6ca47a850a4a4a4aa2c0c3bc88c39c36b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 3237,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 121,
"path": "/player.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "Class = require 'lib.hump.class'\n\nPlayer = Class{}\n\nfunction Player:init(world, x, y)\n love.graphics.setDefaultFilter(\"nearest\", \"nearest\")\n self.hero_atlas = love.graphics.newImage(\"assets/hero.png\")\n self.hero_sprite = love.graphics.newQuad(32, 16, 16, 16, self.hero_atlas:getDimensions())\n\n self.world = world\n self.world:add(self, x, y, 16, 16)\n\n self.x = x\n self.y = y\n self.x_vel = 0\n self.y_vel = 0\n self.fps = 10\n self.anim_timer = 1/10\n self.cur_frame = 0\n self.num_frames = 5\n self.xoffset = 0\n self.cur_dir = 1\n\n self.acc = 20\n self.gravity = 20\n self.max_speed = 200\n self.friction = 5\n self.is_grounded = true\n self.is_jumping = false\n self.max_height_reached = false\n self.jump_acc = 50\n self.jump_max_speed = 9.0\n\n self.is_attacking = false\n self.is_dashing = 0\n\n self.health = 100\n self.coffee = 100\n self.hit_target = false\n self.num_coffees_drank = 0\nend\n\nfunction Player:place(x, y)\n self.world:update(self, x, y, 16, 16)\nend\n\nfunction Player:update(dt, d_pressed)\n local prev_x, prev_y = self.x, self.y\n\n self.x_vel = self.x_vel * (1 - math.min(dt * self.friction, 1))\n self.y_vel = self.y_vel * (1 - math.min(dt * self.friction, 1))\n\n if love.keyboard.isDown(\"up\") then\n self.y_vel = self.y_vel - self.acc * dt\n self.is_moving = true\n end\n if love.keyboard.isDown(\"down\") then\n self.y_vel = self.y_vel + self.acc * dt\n self.is_moving = true\n end\n if love.keyboard.isDown(\"left\") then\n self.x_vel = self.x_vel - self.acc * dt\n self.is_moving = true\n -- self.cur_dir = -1\n end\n if love.keyboard.isDown(\"right\") then\n self.x_vel = self.x_vel + self.acc * dt\n self.is_moving = true\n -- self.cur_dir = 1\n end\n if love.keyboard.isDown(\"s\") then\n end\n -- if love.keyboard.isDown(\"d\") and not self.d_pressed then\n if d_pressed then\n self.coffee = self.coffee + 30*math.exp(-self.num_coffees_drank+1)\n self.num_coffees_drank = self.num_coffees_drank + 1\n end\n\n -- Decay the coffee 
amount\n self.coffee = self.coffee - dt\n\n local goal_x = self.x + self.x_vel\n local goal_y = self.y + self.y_vel\n\n self.x, self.y, collisions, len = self.world:move(self, goal_x, goal_y)\n for i=1,len do -- If more than one simultaneous collision, they are sorted out by proximity\n local col = collisions[i]\n if col.other.name == \"target\" then\n self.hit_target = true\n end\n -- print((\"Collision with %s.\"):format(col.other.name))\n end\n\n -- HANDLE ANIMATION\n if self.is_moving then\n self.anim_timer = self.anim_timer - dt\n if self.anim_timer <= 0 then\n self.anim_timer = 1/self.fps\n self.cur_frame = self.cur_frame + 1\n if self.cur_frame > 5 then self.cur_frame = 0 end\n self.xoffset = 16 * self.cur_frame\n self.hero_sprite:setViewport(self.xoffset, 16, 16, 16)\n end\n else\n self.cur_frame = 0\n self.hero_sprite:setViewport(0, 0, 16, 16)\n end\n\n self.is_moving = false\n\n local dx,dy = self.x - camera.x, self.y - camera.y\n camera:move(dx/2, dy/2)\nend\n\nfunction Player:draw()\n love.graphics.draw(self.hero_atlas, self.hero_sprite, self.x, self.y, 0, 1*self.cur_dir, 1, 0, 0)\n -- love.graphics.setColor(255, 0, 0)\n -- love.graphics.rectangle(\"fill\", self.x-8, self.y, 16,16)\nend\n\nreturn Player\n"
},
{
"alpha_fraction": 0.6620370149612427,
"alphanum_fraction": 0.6805555820465088,
"avg_line_length": 15.615385055541992,
"blob_id": "7f2edd07ea14dc9419a98d7e7d27fcbfc3f79ea5",
"content_id": "3dbcfdc6d2e6f6fe8438d8afce8572c0e0707c95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 216,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 13,
"path": "/game_over.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "local game_ver = {}\n\nfunction game_ver:draw()\n love.graphics.print(\"GAME OVER\", 10, 10)\nend\n\nfunction game_ver:keyreleased(key, code)\n if key == 'return' then\n -- GameState.push(game)\n end\nend\n\nreturn game_ver\n"
},
{
"alpha_fraction": 0.5096153616905212,
"alphanum_fraction": 0.5336538553237915,
"avg_line_length": 16.33333396911621,
"blob_id": "4f66da48e3ed0baadd8c1c8c07ecd890ef685072",
"content_id": "0825217cc69e8ec6bcd1326911c62dc1b5d0ae31",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 624,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 36,
"path": "/genmaze.py",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "import random\n\ndef gen_maze(size):\n # Place the player\n maze = []\n visited = []\n for i in range(0, size):\n new_row = []\n new_row_2 = []\n for j in range(0,size):\n new_row.append(0)\n new_row_2.append(0)\n maze.append(new_row)\n visited.append(new_row_2)\n\n\n # Create the borders\n for i in (0, size - 1):\n for j in range(0,size):\n maze[i][j] = 1\n maze[j][i] = 1\n\n for i in range(0, size):\n print(''.join([str(x) for x in maze[i]]))\n\n for i in range(0, size):\n print(''.join([str(x) for x in visited[i]]))\n\n\n\ndef main():\n gen_maze(5)\n pass\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.7651315927505493,
"alphanum_fraction": 0.7690789699554443,
"avg_line_length": 33.54545593261719,
"blob_id": "72927034b0e3dbfdde3911d799d359c81247c093",
"content_id": "dc167f9401fc5b71d720a742777c39e71c4029d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1520,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 44,
"path": "/README.md",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "# My Blood is Coffee\n\n```\nYou're sleep deprived and you have 3 assigments due.\nYou're only running on coffee.\nWander your own mind and get your work done before you run out of fuel.\nRefill on coffee, but beware of the consequences...\n```\n\nThis was a casual game jam I did over the weekend with my friend, [Zuqi Li](https://github.com/zuqini).\n\nWe generated a theme (Coffee) and both made separate games.\n\nI worked on the game on and off throughout the weekend.\n\nI probably spent a total of 8 hours working on the game as well as learning [Love2D](https://love2d.org/).\n\nThere is one major mechanic left to implement, which I may come back to on another weekend.\n\nAssets are from OpenGameArt\n\nbump.lua and hump.lua were used for convenience.\n\n## How to Play\n\nMove around with arrow keys\n\nDrink coffee by pressing D\n\n## Mechanics\n\nYou're on the clock to reach a target in a generated maze.\nAs the player's `coffee` amount decreases, the player can choose to consume more coffee.\nConsuming more coffee will increase stats like the player's movement speed.\nHowever, consuming more coffee is not without consequences.\nAs the player consumes more coffee, the maze will transform, making it harder.\nNote that the player will not notice these changes, as the changes are made outside\ntheir camera view. Maze generation is based on a recursive DFS, but regeneration is based on\nspatial partitioning of the maze.\n\nThe effectiveness of coffee is based on an exponentially decaying function.\n```\ny = exp(-num_coffees_drank) * 30\n```\n"
},
{
"alpha_fraction": 0.6788617968559265,
"alphanum_fraction": 0.6951219439506531,
"avg_line_length": 15.399999618530273,
"blob_id": "3837b6c5ce4c8e01e27dc5d34f568a14d2d324b7",
"content_id": "33ce63c6db127976b64e3befa0c8f29ef5c493f8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 246,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 15,
"path": "/main_menu.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "local main_menu = {}\n\nlocal game = require 'game'\n\nfunction main_menu:draw()\n love.graphics.print(\"Main Menu\", 10, 10)\nend\n\nfunction main_menu:keyreleased(key, code)\n if key == 'return' then\n GameState.push(game)\n end\nend\n\nreturn main_menu\n"
},
{
"alpha_fraction": 0.5761589407920837,
"alphanum_fraction": 0.6556291580200195,
"avg_line_length": 20.571428298950195,
"blob_id": "518e8d12648dbc7ef970ea3d58ef6b6676de16c1",
"content_id": "be2692fd24bd5e6f6f85a79d3539157b8179ddec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 151,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 7,
"path": "/utils.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "local Utils = {}\n\nfunction Utils.setColor(r, g, b, a)\n if a == nil then a = 100 end\n love.graphics.setColor(r/255, g/255, b/255, a)\nend\nreturn Utils\n"
},
{
"alpha_fraction": 0.6132450103759766,
"alphanum_fraction": 0.6529801487922668,
"avg_line_length": 19.97222137451172,
"blob_id": "0561a5a69cc785e80093289e07893440db45b1de",
"content_id": "9866d38edb4f98d6ee9c272118b55742782041a7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 755,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 36,
"path": "/vec.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "local Vec2 = {}\n\nfunction Vec2:new(xpos, ypos)\n local newObj = {x = xpos, y = ypos}\n self.__index = self\n return setmetatable(newObj, self)\nend\n\nfunction Vec2:dot(v1)\n return (self.x * v1.x) + (self.y * v1.y)\nend\n\nfunction Vec2.add(v1, v2)\n return Vec2:new(v1.x + v2.x, v1.y + v2.y)\nend\n\nfunction Vec2.sub(v1, v2)\n return Vec2:new(v1.x - v2.x, v1.y - v2.y)\nend\n\nfunction Vec2:norm()\n return math.sqrt(math.pow(self.x, 2) + math.pow(self.y, 2))\nend\n\nfunction Vec2.normalize(v)\n local norm = v.norm()\n return Vec2:new(v.x/norm.x, v.y/norm.y)\nend\n\nfunction Vec2:rotate(angle)\n local nx = math.cos(angle) + self.x - math.sin(angle) + self.y\n local ny = math.sin(angle) + self.x + math.cos(angle) + self.y\n return Vec2:new(nx, ny)\nend\n\nreturn Vec2\n"
},
{
"alpha_fraction": 0.5775519013404846,
"alphanum_fraction": 0.6330092549324036,
"avg_line_length": 26.766870498657227,
"blob_id": "b3271bf2872f8f7061254d2513175051fe202f8d",
"content_id": "f7067628e1b27ff849a80d9f486b7ea25854b94d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 4526,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 163,
"path": "/game.lua",
"repo_name": "raywan/my-blood-is-coffee",
"src_encoding": "UTF-8",
"text": "local game = {}\n\ndebug = true\n\nworld = nil\n\n\nlocal Camera = require 'lib.hump.camera'\nlocal Player = require 'player'\nlocal Utils = require 'utils'\n\nlocal ground_0 = {name=\"Ground_0\"}\nlocal ground_1 = {name=\"Ground_1\"}\nlocal target = {name=\"target\"}\nlocal A = {name=\"A\"}\nlocal B = {name=\"B\"}\nlocal walls = {}\nlocal cur_level = 0\n\nlocal game_over = require 'game_over'\nlocal d_pressed = false\n\n-- insert both rectangles into bump\n\nfunction game:init()\n-- print(\"CALLED love.load\")\n player = nil\n world = bump.newWorld(32)\n player = Player(world, 0, 0)\n\n -- world:add(ground_0, 120, 400, 640, 16)\n -- world:add(ground_1, 0, 448, 640, 32)\n -- world:add(A, 0, 0, 64, 256) -- x,y, width, height\n -- world:add(B, 0, -100, 32, 32)\n\n\n -- for i=0,800/16 do\n -- for j=0,600/16 do\n -- sprite_batch:add(i*16, j*16, 0, 16/256, 16/256)\n -- end\n -- end\n\n\n -- Create the camera\n camera = Camera(player.x, player.y, 4)\n -- Add the maze walls into the world\n\n -- for i, s in ipairs(walls) do\n -- print(walls[i].x, walls[i].y)\n -- world:add(walls[i], walls[i].x, walls[i].y, 16, 16)\n -- end\n\n start_time = love.timer.getTime()\n last_time = love.timer.getTime()\nend\n\n\nfunction game:enter()\n cur_level = cur_level + 1\n print(\"Cur level \"..tostring(cur_level))\n\n -- Load assets\n love.graphics.setDefaultFilter(\"nearest\", \"nearest\")\n bg = love.graphics.newImage(\"assets/platformer/bg_castle.png\")\n bg_quad = love.graphics.newQuad(0, 0, 16, 16, bg:getDimensions())\n sprite_batch = love.graphics.newSpriteBatch(bg, 1000)\n\n -- Load assets if necessary\n -- Generate level\n level_path = string.format(\"assets/level_%s.txt\", cur_level)\n print(level_path)\n local level_data = io.open(level_path, \"rb\")\n local lines = {}\n\n local line_num = 1\n while true do\n local line = level_data:read()\n if line == nil then break end\n for i = 1, #line do\n local c = line:sub(i,i)\n if c == '1' then\n print(\"Adding\", i*16, 
line_num*16)\n local block = {x=i*16,y=line_num*16,w=16,h=16}\n walls[#walls+1] = block\n world:add(block, block.x, block.y, 16, 16)\n -- BUG: Not sure why I need to offset by 1...makes no sense\n sprite_batch:add(bg_quad, (1+i)*16, (1+line_num)*16, 0, 1, 1, 16, 16)\n -- sprite_batch:add(bg_quad, block.x, block.y, 0, 1, 1, 16, 16)\n elseif c == 'T' then\n world:add(target, i*16, line_num*16, 16, 16)\n elseif c == '@' then\n print(\"Adding player\", i*16, line_num * 16)\n -- player = Player(world, i*16, line_num*16)\n player:place(i*16, line_num*16)\n end\n end\n line_num = line_num + 1\n print(line)\n end\n -- Place the player\nend\n\nfunction game:keypressed(key)\n if key == \"d\" then\n d_pressed = true\n end\nend\n\n-- dt is the last time the update function has been called\nfunction game:update(dt)\n if dt > 0.04 then return end\n player:update(dt, d_pressed)\n last_time = love.timer.getTime()\n if player.hit_target and cur_level < 1 then\n -- Set next level\n -- GameState.switch(game_over)\n player.hit_target = false\n GameState.switch(game)\n elseif player.hit_target and cur_level == 1 then\n GameState.switch(game_over)\n elseif player.coffee < 0 then\n GameState.switch(game_over)\n end\n d_pressed = false\nend\n\n\nfunction game:draw()\n -- love.graphics.print('Hello World!', 400, 300)\n -- love.graphics.setColor(100, 100, 0)\n -- love.graphics.rectangle(\"fill\", 120, 100, 100, 50)\n -- love.graphics.rectangle(\"fill\", 100, 400, 100, 50)\n\n Utils.setColor(255, 255, 255)\n love.graphics.clear(0,0,1)\n\n love.graphics.setColor(255, 255, 255)\n love.graphics.print(\"FPS: \"..tostring(love.timer.getFPS()), 10, 10)\n\n camera:attach()\n -- love.graphics.rectangle('fill', world:getRect(ground_0))\n -- love.graphics.rectangle('fill', world:getRect(ground_1))\n -- love.graphics.rectangle('fill', world:getRect(A))\n -- love.graphics.rectangle('fill', world:getRect(B))\n\n\n player:draw()\n -- for i=1,#walls do\n -- love.graphics.rectangle('fill', 
world:getRect(walls[i]))\n -- end\n love.graphics.draw(sprite_batch)\n Utils.setColor(0, 255, 0)\n love.graphics.rectangle('fill', world:getRect(target))\n camera:detach()\n\n -- Render UI\n Utils.setColor(100, 50, 0)\n love.graphics.print(\"Num Coffees: \"..tostring(player.num_coffees_drank), 20, 20, 0, 2)\n love.graphics.print(\"Coffee: \"..string.format(\"%.2f%%\", player.coffee), 20, 40, 0, 2)\n love.graphics.print(\"Time Elapsed: \"..string.format(\"%.2f sec.\", last_time - start_time), 20, 60, 0, 2)\nend\n\nreturn game\n"
}
] | 11 |
preetijain7681/Manipulator | https://github.com/preetijain7681/Manipulator | b2db11a759e2507319689b5a6b91540561a8dd1d | f6390ae45054180440cb3aa36df04a7b6a029dcc | d96b7a06237c90eba329166fe56714c2ef387e86 | refs/heads/master | 2023-06-05T17:57:11.224887 | 2021-05-18T12:36:35 | 2021-05-18T12:36:35 | 292,330,596 | 1 | 2 | null | null | null | null | null | [
{
"alpha_fraction": 0.3956655263900757,
"alphanum_fraction": 0.4973149299621582,
"avg_line_length": 35.33797836303711,
"blob_id": "07ed3f35f26090b33844786cbfb57d37aebcec52",
"content_id": "71c4d199b103fc60e3a692ffe4e8bf3acbdb3d87",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10428,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 287,
"path": "/Milestone_2/scripts/milestone2_SH.py",
"repo_name": "preetijain7681/Manipulator",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n'''\nOutputs: outputs functions that initialize the robot \n'''\nimport csv\nimport modern_robotics as mr\nimport numpy as np\nimport math\n\ndef TrajectoryGenerator(Xstart, Xend, Tf, N,gripper_state,write):\n \"\"\"Computes a trajectory as a list of N SE(3) matrices corresponding to\n the straight line motion. \n :param Xstart: The initial end-effector configuration\n :param Xend: The final end-effector configuration\n :param Tf: Total time of the motion in seconds from rest to rest\n :param N: The number of points N > 1 (Start and stop) in the discrete\n representation of the trajectory\n :param gripper_state: 0- open, 1-close\n :write: a csv_write object\n :return: The discretized trajectory as a list of N matrices in SE(3)\n separated in time by Tf/(N-1). The first in the list is Xstart\n and the Nth is Xend. R is the rotation matrix in X, and p is the linear position part of X. \n 13-array: [9 R variables (from first row to last row), 3 P variables (from x to z), gripper_state ]\n Example Input:\n Xstart = np.array([[1, 0, 0, 1],\n [0, 1, 0, 0],\n [0, 0, 1, 1],\n [0, 0, 0, 1]])\n Xend = np.array([[0, 0, 1, 0.1],\n [1, 0, 0, 0],\n [0, 1, 0, 4.1],\n [0, 0, 0, 1]])\n Tf = 5\n N = 4\n gripper_state = 0\n write = csv.writer(csv_file,delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n Output:\n [1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.1992,0.0,0.7535,0.0]\n \"\"\"\n N = int(N)\n timegap = Tf / (N - 1.0)\n traj = [[None]] * N\n for i in range(N):\n s = mr.QuinticTimeScaling(Tf, timegap * i)\n Rstart, pstart = mr.TransToRp(Xstart)\n Rend, pend = mr.TransToRp(Xend)\n traj[i] = np.r_[np.c_[np.dot(Rstart, \\\n mr.MatrixExp3(mr.MatrixLog3(np.dot(np.array(Rstart).T,Rend)) * s)), \\\n s * np.array(pend) + (1 - s) * np.array(pstart)], \\\n [[0, 0, 0, 1]]]\n # traj[i] = np.dot(Xstart, mr.MatrixExp6(mr.MatrixLog6(np.dot(mr.TransInv(Xstart), Xend)) * s))\n output = traj[i][:-1,:-1].flatten()\n output = np.append( output, 
traj[i][:,-1][:-1].flatten())\n output = np.append(output, gripper_state)\n write.writerow( output)\n \n\ndef trajectory_generator_main( X_sc_init = [], X_se_init = []):\n '''\n there are 8 segments in the trajectory. \n 0. open the gripper\n 1. initial pose to standoff (a few cm above ground) (3rd or 5th order polynomial) (3s)\n 2. standoff down to cube (up and down motion)(1s)\n 3.(grasp) (1s)\n 4. cube back up to stand off (1s)\n 5. first standoff to 2nd standoff(3s)\n 6. 2nd stand off desired location (1s)\n 7.(open)(1s)\n 8. back to the 2nd stand off(1s)\n '''\n ##T0 = 3.5\n T1 = 7.5\n T2 = 3.5\n T3 = 3.75\n T4 = T2\n T5 = 13.5\n T6 = T2\n #T7 = T3\n T8 = T4\n Tn = T2\n delta_t = 0.01\n\n #initial and desired end positions of the cube. Cube is 5cm tall.\n if len( X_sc_init) == 0:\n X_sc_init = np.array([[1.0, 0.0, 0.0, 1.0],\n [0.0, 1.0, 0.0, 0.0], ##0.2576\n [0.0, 0.0, 1.0, 0.025],\n [0.0, 0.0, 0.0, 1.0]])\n\n X_sc_goal = np.array([[0.0, 1.0, 0.0, -0.2576],\n [-1.0, 0.0, 0.0, -1.0],\n [0.0, 0.0, 1.0, 0.025],\n [0.0, 0.0, 0.0, 1.0]])\n\n #Desired initial position of the mobile base\n X_sb_init = np.array([[1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.1845],\n [0.0, 0.0, 0.0, 1.0]])\n\n #Fixed offset bw the mobile base and the arm\n X_b0 = np.array([[1.0, 0.0, 0.0, -0.00024],\n [0.0, 1.0, 0.0, 0.142],\n [0.0, 0.0, 1.0, 0.3203],\n [0.0, 0.0, 0.0, 1.0]])\n\n #Initial end effector configuration\n M_0e = np.array([[1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0]])\n # 180 deg Rotation about x \n Rotation_x_180 = np.array([[1.0, 0.0, 0.0, 0.0],\n [0.0, math.cos(179 * math.pi/180), math.sin(179 * math.pi/180), 0.0],\n [0.0,-math.sin(179 * math.pi/180), math.cos(179 * math.pi/180), 0.0],\n [0.0, 0.0, 0.0, 1.0]])\n # 180 deg Rotation about y \n Rotation_y_180 = np.array([[math.cos(179 * math.pi/180), 0.0, -math.sin(179 * math.pi/180), 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [math.sin(179 * math.pi/180), 0.0, 
math.cos(179 * math.pi/180), 0.0],\n [0.0, 0.0, 0.0, 1.0]])\n # 180 deg Rotation about z \n Rotation_z_180 = np.array([[math.cos(179 * math.pi/180), math.sin(179 * math.pi/180), 0.0, 0.0],\n [-math.sin(179 * math.pi/180), math.cos(179 * math.pi/180), 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0]])\n # 90 deg Rotation about x \n Rotation_x_90 = np.array([[1.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, -1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0]]) \n # 90 deg Rotation about y \n Rotation_y_90 = np.array([[0.0, 0.0, -1.0, 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [1.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0]]) \n # 90 deg Rotation about z \n Rotation_z_90 = np.array([[math.cos(90 * math.pi/180), math.sin(90 * math.pi/180), 0.0, 0.0],\n [-math.sin(90 * math.pi/180), math.cos(90 * math.pi/180), 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0]]) \n Rotation_z_90_cc = np.array([[math.cos(90 * math.pi/180), -math.sin(90 * math.pi/180), 0.0, 0.0],\n [math.sin(90 * math.pi/180), math.cos(90 * math.pi/180), 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0]]) \n\n #From above, we can get the end effector at each segment X_se\n if len(X_se_init) == 0:\n X_se_init = X_sb_init.dot( X_b0).dot(M_0e)\n\n #open gripper at the origin\n #X_se_0 = np.copy(X_se_init)\n #N0 = T0/delta_t + 1 -2.4790e-01 -1.0000e+00\n #gripper_state_0 = 0\n\n ##X_se_x = np.copy(X_se_init)\n ##X_se_x[1][3] -= 0.5152\n Nn = Tn/delta_t+1\n ##gripper_state_1 = 0 \n\n ##X_se_y = np.copy(X_se_x)\n ##X_se_y[0][3] += 1.0000\n ##Nn = Tn/delta_t+1\n ##gripper_state_1 = 0 \n\n #stand off 1\n X_se_1 = np.copy(X_se_init)\n #rotate about x axis by 180 deg. 
\n\n X_se_1 = X_se_1.dot( Rotation_z_90 )\n ##X_se_1[1][3] -= 0.2576 ##0.5152 +2.5760e-01\n # X_se_1[2][3] += 0.78 #73\n # X_se_1[0][3] += 0.26\n N1 = T1/delta_t+1\n gripper_state_1 = 0\n \n \n\n #2 Come down to cube initial position\n X_se_2 = np.copy( X_sc_init)\n X_se_2 = X_se_2.dot( Rotation_x_180 )\n X_se_2 = X_se_2.dot( Rotation_z_90_cc )\n X_se_2[0][3] = X_sc_init[0][3]+ 0.2576\n X_se_2[1][3] = X_sc_init[1][3]- 0.01\n X_se_2[2][3] += 0.73\n # X_se_1[0][3] += 0.26\n # X_se_2[2][3] += 0.33\n N2 = T2/delta_t+1\n gripper_state_2 = 0\n\n # 3 Close Gripper\n X_se_3 = np.copy(X_se_2)\n X_se_3[2][3] -= 0.33\n N3 = T3/delta_t+1\n gripper_state_3 = 0\n\n #4 Coming back up to stand_off 1\n X_se_4 = np.copy( X_se_3)\n N4 = T4/delta_t+1\n gripper_state_4 = 1\n\n #5 Going to stand_off 2\n X_se_5 = np.copy(X_sc_goal)\n #X_se_5 = Rotation_y.dot(X_se_5)\n X_se_5 = X_se_5.dot( Rotation_y_180 )\n X_se_5 = X_se_5.dot( Rotation_z_90 )\n X_se_5[0][3] = X_sc_goal[0][3]+ 0.2576\n X_se_5[1][3] = X_sc_goal[1][3]- 0.2576\n X_se_5[2][3] +=0.75\n N5 = T5/delta_t+1\n gripper_state_5 = 1\n\n #6 going to goal location\n X_se_6 = np.copy(X_se_5)\n X_se_6[2][3] -=0.33\n N6 = T6/delta_t+1\n gripper_state_6 = 1\n\n #7 Open Gripper\n #X_se_7 = np.copy(X_se_6)\n #N7 = T7/delta_t+1\n #gripper_state_7 = 0\n\n #8 Come back up to stand off 2\n X_se_8 = np.copy( X_se_5)\n N8 = T8/delta_t+1\n gripper_state_8 = 0\n \n\n with open('trajectory.csv',mode='w') as csv_file:\n write = csv.writer(csv_file,delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n\n ##TrajectoryGenerator(X_se_init, X_se_init, T0, N0, gripper_state_0, write)\n ##TrajectoryGenerator(X_se_init, X_se_x, T1, N1, gripper_state_1, write)\n ##TrajectoryGenerator(X_se_x , X_se_y, Tn, Nn, gripper_state_2, write) \n TrajectoryGenerator(X_se_init , X_se_1, Tn, Nn, gripper_state_2, write)\n TrajectoryGenerator(X_se_1 , X_se_2, T2, N2, gripper_state_2, write)\n # TrajectoryGenerator(X_se_x , X_se_2, Tn, Nn, gripper_state_2, write)\n 
TrajectoryGenerator(X_se_2 , X_se_3, T3, N3, gripper_state_3, write)\n TrajectoryGenerator(X_se_3 , X_se_4, T4, N4, gripper_state_4, write)\n TrajectoryGenerator(X_se_4 , X_se_5, T5, N5, gripper_state_5, write)\n TrajectoryGenerator(X_se_5 , X_se_6, T6, N6, gripper_state_6, write)\n #TrajectoryGenerator(X_se_6 , X_se_7, T7, N7, gripper_state_7, write)\n TrajectoryGenerator(X_se_6 , X_se_8, T8, N8, gripper_state_8, write)\n\ndef get_initial_cube_poses():\n #initial and desired end positions of the cube. Cube is 5cm tall. \n X_sc_init = np.array([[1,0,0,1],\n [0,1,0,0],\n [0,0,1,0.025],\n [0,0,0,1]]) \n\n X_sc_goal = np.array([[0,1,0,0],\n [-1,0,0,-1],\n [0,0,1,0.025],\n [0,0,0,1]])\n \n return X_sc_init, X_sc_goal\n\ndef get_initial_desired_robot_poses():\n #Desired initial position of the mobile base\n X_sb_init = np.array([[1,0,0,0],\n [0,1,0,0],\n [0,0,1,0.0963],\n [0,0,0,1]])\n\n #Fixed offset bw the mobile base and the arm\n X_b0 = np.array([[1,0,0,0.1662],\n [0,1,0,0],\n [0,0,1,0.0026],\n [0,0,0,1]])\n\n #Initial end effector configuration\n M_0e = np.array([[1,0,0,0.33],\n [0,1,0,0],\n [0,0,1,0.6546],\n [0,0,0,1]])\n\n #From above, we can get the end effector at each segment X_se \n X_se_init = X_sb_init.dot( X_b0).dot(M_0e)\n\n return X_sb_init, X_se_init\n\n\n\nif __name__ == \"__main__\":\n trajectory_generator_main()"
},
{
"alpha_fraction": 0.7372384667396545,
"alphanum_fraction": 0.7539749145507812,
"avg_line_length": 36.125,
"blob_id": "0fdf3a487835c69028f4852a20d1abc0fa521799",
"content_id": "c34851639fd7d2bf0cdcfe827a9a74c0df831702",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1195,
"license_type": "permissive",
"max_line_length": 146,
"num_lines": 32,
"path": "/README.md",
"repo_name": "preetijain7681/Manipulator",
"src_encoding": "UTF-8",
"text": "# Mobile Manipulator\n\nTasks:\n1. Perform Pick and place operation on our 5 DOF mobile manipulator robot. \n2. Implement object detection and classification using the bridge for ROS, YOLO in CoppeliaSim. \n3. Perform dexterous tasks for industrial purposes. \n\n\n# Progress Report\n- [Milestone 1 : Kinematics of Mobile Manipulator](#milestone-1)\n- [Milestone 2 : Trajectory Generation](#milestone-2)\n\n\n\n\n# Milestone 1 \n[**Video of Final Scene**](https://youtu.be/35IWMIz1MxU) \n* Status : **Completed**\n* Description : Odometry for Mobile Manipulator\n* Achieved Tasks :\n1. **Design** : Designing 5 DoF mobile manipulator arm\n2. **Kinematics** : Understanding Kinematics of mobile manipulator chassis\n3. **Odometry** : Implementing Odometry for Mobile Manipulator chassis\n\n\n# Milestone 2\n[**Video of Final Scene**](https://youtu.be/TO-ZnmxWqNg)\n* Status : **Completed**\n* Description : Point-to-Point Trajectory Generation for end effector\n* Achieved Tasks :\n1. **Quintic Time Scaling** : Implemented Quintic time scaling for Trajectory generation\n2. **Discrete Point-to-Point Trajectory Generation** : Computing set of N SE(3) configuration matrices for point-to-point movement of end-effector\n\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.5496838092803955,
"alphanum_fraction": 0.5826558470726013,
"avg_line_length": 35.60330581665039,
"blob_id": "056628ad01191cc7bcde99397530bf7cf61b229a",
"content_id": "eabbe7d70f7c4fa798c2a296174f8da2b740812b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4428,
"license_type": "permissive",
"max_line_length": 294,
"num_lines": 121,
"path": "/Milestone_1/milestone1.py",
"repo_name": "preetijain7681/Manipulator",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n'''\nThis file calculates the configuration of YouBot after time step delta_t, using first order euler integration\n new arm joint angles = (old arm joint angles) + (joint speeds) * delta_t\n new wheel angles = (old wheel angles) + (wheel speeds) * delta_t \n \nInputs: \n 1. Current configuration\n 2. Joint speed\n 3. Simulation time step delta_t\n 4. Joint speed limits\n \nOutput:\n 1. Configuration of the robot after time step delta_t\n'''\nimport csv\nimport numpy as np\n\ndef Rot_body(phi):\n '''\n Generates a rotational matrix about the z axis by phi for the car. The chassis configuration should be [phi, x, y] \n Input: angle of rotation about z axis phi\n Output: 3x3 rotational matrix\n '''\n c, s = np.cos(phi), np.sin(phi)\n R = np.array([[1,0,0],\\\n [0, c, s],\\\n [0, -s, c]])\n return R\n\ndef NextState(X_current, qdot, delta_t, qdot_lim, gripper_state, write = None):\n '''\n Description is the same as the file description\n \n Inputs: \n 1. X_curent: 12-array representing the current configuration of the robot (3 variables for the chassis configuration [phi, x,y], 5 variables for the arm configuration, and 4 variables for the wheel angles, the wheel sequence start from the left front wheel of Youbot and go clock wise).\n 2. qdot: 9-array of controls indicating the arm joint speeds (5 variables) and the wheel speeds u (4 variables).\n 3. delta_t: a timestep delta_t.\n 4. qdot_lim: 2-array indicating the maximum angular speed of the arm joints and the wheels, [-vmax, +vmax]. \n 5. an CSV write object for writing the result into the CSV file\n Outputs: \n 1. A 12-array representing the configuration of the robot time delta_t later.\n 2. 
CSV file containing the above 12-array and gripper state \n '''\n \n #configuration variables\n r = 0.075\n # l = 0.47/2.0\n # w = 0.3/2.0\n d = 0.2375\n q_dot = np.copy( qdot )\n #Speed limiting - arm\n for i in range(5):\n if q_dot[i] > qdot_lim[0]: \n q_dot[i] = qdot_lim[0]\n if q_dot[i] < -1.0*qdot_lim[0]:\n q_dot[i] = -1.0*qdot_lim[0]\n\n #Speed limiting - wheels\n for i in range(4):\n if q_dot[5+i] > qdot_lim[1]:\n q_dot[5+i] = qdot_lim[1]\n if q_dot[5+i] < -1.0*qdot_lim[1]:\n q_dot[5+i] = -1.0*qdot_lim[1]\n \n #Joint config updating\n X = np.array( X_current )\n X_new = np.copy(X)\n X_new[3:] += q_dot*delta_t\n\n #Car config updating: X_new[8:] = H0*(R(phi)*X_new[0:3]) --> x_new[0:3] = X[0:3]+R(phi)^T * H0^+ * X[8:]\n H0 = 1/r * np.array([[d, -1, 0],\\\n [-d, 0, 1],\\\n [-d, -1, 0],\\\n [d, 0, 1]]) #H in body frame \n u_increment = (q_dot*delta_t)[5:]\n xb = np.linalg.pinv(H0).dot(u_increment) #car configuration X in body frame\n phi = X[0]\n RT = Rot_body(phi).T\n X_new[0:3] = X_new[0:3] + RT.dot(xb) # RT is pre-multiplied to change the co-ordinates and if post multiplied we would have changed the frame\n write_output = np.append(X_new,gripper_state)\n write.writerow( write_output)\n return X_new\n \n\ndef main():\n '''\n This is a test function for NextState. It tests the robot with sample wheel and joint speed commands. 
\n '''\n T = 1.0\n delta_t = 0.01\n gripper_state = 1\n X_current = np.zeros(12)\n qdot = np.zeros(9)\n N = T/delta_t\n u_total_1 = np.array([-10,0,-10,0])\n u_total_2 = np.array([0,30,0,30])\n u_total_3 = np.array([-10,10,10,-10])\n qdot_lim = np.array([10,np.pi])/N\n\n\n with open('config_update.csv',mode='w') as csv_file:\n write = csv.writer(csv_file,delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n for i in np.arange(N): \n wheels_speed1 = u_total_1/T\n qdot[5:] = wheels_speed1\n X_current = NextState(X_current, qdot, delta_t, qdot_lim, gripper_state, write)\n\n for i in np.arange(N):\n wheels_speed2 = u_total_2/T\n qdot[5:] = wheels_speed2\n X_current = NextState(X_current, qdot, delta_t, qdot_lim, gripper_state, write)\n \n \n for i in np.arange(N):\n wheels_speed3 = u_total_3/T\n qdot[5:] = wheels_speed3\n X_current = NextState(X_current, qdot, delta_t, qdot_lim, gripper_state, write)\n \nif __name__ == \"__main__\":\n main()"
}
] | 3 |
DiegoTeixeiraMarques/DataScience | https://github.com/DiegoTeixeiraMarques/DataScience | eb04bfb822eecb133e968ccb32768f41d3b0cc93 | 09074ea10f927eaa644b632eec959bec25861f22 | 58c6a8e3e1fd16d55f045c05a050b84342c4b854 | refs/heads/master | 2023-02-02T19:22:49.867682 | 2023-01-29T23:43:08 | 2023-01-29T23:43:08 | 258,811,292 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5439296960830688,
"alphanum_fraction": 0.5682907104492188,
"avg_line_length": 32.15999984741211,
"blob_id": "536493f93c0eb43b1a19084f56a296b45d39cbe4",
"content_id": "4b8e924cd02d76cda3fbc0601414285bea631744",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2512,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 75,
"path": "/0 - Algorithms/percepton.py",
"repo_name": "DiegoTeixeiraMarques/DataScience",
"src_encoding": "UTF-8",
"text": "pesos = [1, 1, 1]\nvies = 1\ncoeficiente = 0.3\nDR = 0\nmatriz = [[0, 0, -1, \"null\"], [0, 1, -1, \"null\"], [1, 0, -1, \"null\"], [1, 1, 1, \"null\"]]\nfim = False\n#matriz = [[0, 0, -1, \"null\"], [0, 1, 1, \"null\"], [1, 0, 1, \"null\"], [1, 1, -1, \"null\"]]\n\ndef percepton(indice):\n # Faz a soma dos produtos dos X pelos Pesos\n soma = matriz[indice][0] * pesos[0] + matriz[indice][1] * pesos[1] + vies * pesos[2]\n # Excecuta a função de ativação\n classe = ativacao(soma) \n # Verifica se os pesos atendem ao resultado esperado\n if classe != matriz[indice][2]:\n # Reinicia a matriz para cálculo de novos pesos\n reiniciarMatriz(indice)\n print(\"Indice: \", indice)\n # Calcula o erro\n DR = calcularDiferenca(matriz[indice][2], classe)\n print(\"DR: \", DR)\n # Recalcula os pesos\n recalculoPeso(indice, DR)\n else:\n # Atribui ok se os pesos condicionarem ao resultado esperado\n matriz[indice][3] = \"ok\"\n # Verifcia se a matriz está com todos os P atendidos pelos pesos\n return verificaConclusao()\n return False\n\ndef verificaConclusao():\n for i in range(len(matriz)):\n if matriz[i][3] != \"ok\":\n return False\n #print(\"Matriz: \", matriz)\n return True\n\ndef reiniciarMatriz(indice):\n # Atribui null a todos os P da matriz, pois o peso não satisfez o resultado esperado\n for i in range(len(matriz)):\n matriz[i][3] = \"null\"\n\ndef ativacao(soma):\n if soma >= 0:\n return 1\n else:\n return -1\n\ndef calcularDiferenca(desejado, obtido):\n print(\"Desejado: \", desejado)\n print(\"Obtido: \", obtido)\n return desejado - obtido\n\n\ndef recalculoPeso(indice, DR):\n #print(pesos[0], \" + \", coeficiente, \" * \", DR, \" * \", matriz[indice][0])\n #print(pesos[1], \" + \", coeficiente, \" * \", DR, \" * \", matriz[indice][1])\n #print(pesos[2], \" + \", coeficiente, \" * \", DR, \" * \", vies)\n pesos[0] = round(pesos[0] + coeficiente * DR * matriz[indice][0], 2)\n pesos[1] = round(pesos[1] + coeficiente * DR * matriz[indice][1], 2)\n pesos[2] = 
round(pesos[2] + coeficiente * DR * vies, 2)\n print(\"Pesos: \", pesos)\n print(\"------------------------\")\n\ndef pegarIndice():\n # Retorna o índice do primeiro P que encontrar null\n for indice in range(len(matriz)):\n if matriz[indice][3] == \"null\":\n return indice\n\nif __name__ == '__main__':\n\n while fim == False:\n indice = pegarIndice()\n fim = percepton(indice)\n\n \n\n \n "
},
{
"alpha_fraction": 0.508483350276947,
"alphanum_fraction": 0.5463988780975342,
"avg_line_length": 28.58974266052246,
"blob_id": "40614793361aead230ac51be0cc7e354649a5700",
"content_id": "661566f6932a0ad203f8c22e1b665207406d0933",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5786,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 195,
"path": "/0 - Algorithms/k-means.py",
"repo_name": "DiegoTeixeiraMarques/DataScience",
"src_encoding": "UTF-8",
"text": "import random\nimport math\n\n# Algoritmo K-means\n\npessoa1 = ['Maria', 1, 1]\npessoa2 = ['João', 9.4, 6.4]\npessoa3 = ['José', 2.5, 2.1]\npessoa4 = ['Antonio', 8, 7.7]\npessoa5 = ['Francisco', 0.5, 2.2]\npessoa6 = ['Xico', 7.9, 8.4]\npessoa7 = ['Mari', 7, 7]\npessoa8 = ['Zé', 2.8, 0.8]\npessoa9 = ['Jo', 1.2, 3]\npessoa10 = ['Wilton', 7.8, 6.1]\n#pessoa11 = ['Outro', 'Sem pelo', 'mamifero']\n#pessoa12 = ['Outro2', 'Com pelo', 'mamifero']\n\ndataSetProc = []\ndataSet = []\nxLista = []\nyLista = []\ncentroid1 = [0, 0]\ncentroid2 = [0, 0]\n\ndef criarDataSet():\n dataSet.append([pessoa1, 0])\n dataSet.append([pessoa2, 0])\n dataSet.append([pessoa3, 0])\n dataSet.append([pessoa4, 0])\n dataSet.append([pessoa5, 0])\n dataSet.append([pessoa6, 0])\n dataSet.append([pessoa7, 0])\n dataSet.append([pessoa8, 0])\n dataSet.append([pessoa9, 0])\n dataSet.append([pessoa10, 0])\n #dataSet.append([pessoa11, 0])\n #dataSet.append([pessoa12, 0])\n\ndef remove_repetidos(lista):\n l = []\n for i in lista:\n if i not in l:\n l.append(i)\n #l.sort()\n return l\n\ndef atribuirRotulos(dataSet, xLista, yLista):\n\n # Separa os atributos em duas listas para posteriormente atriburi os rotulos\n for i in range(len(dataSet)):\n\n xLista.append(dataSet[i][0][1])\n yLista.append(dataSet[i][0][2])\n\n # Remove valores duplicados para atribuir os rotulos\n xLista = remove_repetidos(xLista)\n yLista = remove_repetidos(yLista)\n\n rotulo = 1\n for i in range(len(xLista)):\n idade = xLista[i]\n xLista[i] = (idade, rotulo)\n rotulo = rotulo + 1\n\n rotulo = 1\n for i in range(len(yLista)):\n idade = yLista[i]\n yLista[i] = (idade, rotulo)\n rotulo = rotulo + 1\n\n for i in range(len(dataSet)):\n nome = dataSet[i][0][0]\n idade = dataSet[i][0][1]\n salario = dataSet[i][0][2]\n\n for i in range(len(xLista)):\n if type(idade) == int or type(idade) == float:\n x = idade\n break\n else: \n xLista[i][0] == idade\n x = xLista[i][1] \n for i in range(len(yLista)):\n if type(salario) == int or 
type(salario) == float:\n y = salario\n break\n else:\n yLista[i][0] == salario\n y = yLista[i][1]\n \n dataSetProc.append([nome, x, y, 0])\n\n return xLista, yLista, dataSetProc\n\n\ndef calcularCentroids(xLista, yLista, centroid1, centroid2, dataSetProc = []):\n\n if centroid1 == [0, 0] and centroid2 == [0, 0]:\n centroid1 = [random.randrange(1, len(xLista)), random.randrange(1, len(yLista))]\n while centroid2 == [0, 0] or centroid1 == centroid2:\n centroid2 = [random.randrange(1, len(xLista)), random.randrange(1, len(yLista))]\n else:\n qtd = 0\n total = 0\n #Média X no cluster 1\n for i in range(len(dataSetProc)):\n if dataSetProc[i][3] == 1:\n qtd = qtd + 1\n total = total + dataSetProc[i][1]\n if qtd == 0:\n centroid1[0] = centroid1[0]\n else:\n centroid1[0] = total/qtd\n\n qtd = 0\n total = 0\n #Média Y no cluster 1\n for i in range(len(dataSetProc)):\n if dataSetProc[i][3] == 1:\n qtd = qtd + 1\n total = total + dataSetProc[i][2]\n if qtd == 0:\n centroid1[1] = centroid1[1]\n else:\n centroid1[1] = total/qtd\n\n qtd = 0\n total = 0\n #Média X no cluster 2\n for i in range(len(dataSetProc)):\n if dataSetProc[i][3] == 2:\n qtd = qtd + 1\n total = total + dataSetProc[i][1]\n if qtd == 0:\n centroid2[0] = centroid2[0]\n else:\n centroid2[0] = total/qtd\n\n qtd = 0\n total = 0\n #Média X no cluster 1\n for i in range(len(dataSetProc)):\n if dataSetProc[i][3] == 2:\n qtd = qtd + 1\n total = total + dataSetProc[i][2]\n if qtd == 0:\n centroid2[1] = centroid2[1]\n else:\n centroid2[1] = total/qtd\n \n return centroid1, centroid2\n\ndef calcularDistancias(dataSetProc, centroid1, centroid2):\n verificador = False\n for i in range(len(dataSetProc)):\n \n distancia1 = math.sqrt(((centroid1[1] - dataSetProc[i][2]) ** 2) + ((centroid1[0] - dataSetProc[i][1]) ** 2))\n distancia2 = math.sqrt(((centroid2[1] - dataSetProc[i][2]) ** 2) + ((centroid2[0] - dataSetProc[i][1]) ** 2))\n\n if distancia1 < distancia2:\n #Verifica se mudou de cluster\n if dataSetProc[i][3] != 1:\n 
verificador = True\n #Atribui o cluster\n dataSetProc[i][3] = 1\n else:\n #Verifica se mudou de cluster\n if dataSetProc[i][3] != 2:\n verificador = True\n #Atribui o cluster\n dataSetProc[i][3] = 2\n #print('Verificador: ', verificador)\n #print(dataSetProc)\n #print('***************************************')\n return verificador, dataSetProc\n\ncriarDataSet()\nxLista, yLista, dataSetProc = atribuirRotulos(dataSet, xLista, yLista)\n\ncentroid1, centroid2 = calcularCentroids(xLista, yLista, centroid1, centroid2, dataSetProc)\nverificador = True\n\n# Verifica se houve mudança de cluster\nwhile verificador == True:\n \n # Calcula as distâncias para os centroids\n verificador, dataSetProc = calcularDistancias(dataSetProc, centroid1, centroid2)\n # Recalcula a média dos centroids\n centroid1, centroid2 = calcularCentroids(xLista, yLista, centroid1, centroid2, dataSetProc)\n \n#print(dataSetProc)\n\nfor i in range(len(dataSetProc)):\n print(dataSetProc[i])\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.6409965753555298,
"alphanum_fraction": 0.6477916240692139,
"avg_line_length": 22.263158798217773,
"blob_id": "1b0ad957fdf086d3c64dfbc85f79349697089e75",
"content_id": "82d674eaaafea07b866057d1a38049a1cb37a5ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 884,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 38,
"path": "/analise jupyter/debug.py",
"repo_name": "DiegoTeixeiraMarques/DataScience",
"src_encoding": "UTF-8",
"text": "# importando os módulos (bibliotecas)\nimport pandas as pd\n\n# Import DS Jair\ndfj = pd.read_json(\"jairbolsonaro.json\")\n\n#Carregando Data set das 'STOP WORDS'\nsw = pd.read_csv(\"stopwords.txt\", header = None, names=[\"Palavra\"])\n\nPalavras_N = sw['Palavra'].str.split()\n\nPalavras_jair = dfj['full_text'].str.split()\n\n#print(type(Palavras_N))\n#print(type(Palavras_jair))\n\ndictj = dict()\n\nfor Frases in Palavras_jair:\n for Palavras in Frases:\n if Palavras in dictj:\n dictj[Palavras] = dictj[Palavras] + 1\n else: \n dictj[Palavras] = 1\n\n#print('dictj: ', dictj)\n\ndfj = pd.DataFrame(list(dictj.items()), columns=['Palavra', 'QTD'])\n#sw = pd.DataFrame(sw, columns=['Palavra'])\n\nprint(len(dfj))\n\nprint(\"sw: \",sw)\n\nndfj = dfj[~dfj.Palavra.isin(sw.Palavra.values)] \nprint(ndfj.sort_values(['QTD'],ascending=False).head(1000))\n\nprint(len(ndfj))"
},
{
"alpha_fraction": 0.628139078617096,
"alphanum_fraction": 0.642949104309082,
"avg_line_length": 33.511112213134766,
"blob_id": "66b6a0098ad256d9d64cfb198b395861d557bdd7",
"content_id": "7c3e1e489b862b5c6e926409389576facb17e226",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 3109,
"license_type": "no_license",
"max_line_length": 257,
"num_lines": 90,
"path": "/6 - Linguagem R/Aula 18.04.20/terremoto_exemplo.R",
"repo_name": "DiegoTeixeiraMarques/DataScience",
"src_encoding": "UTF-8",
"text": "## Aplicação Web!!!\n# Dataset reference: https://www.kaggle.com/usgs/earthquake-database/data\n\n### Limpando Plots, Console and Ambiente\nrm(list = ls())\ndev.off(dev.list()[\"RStudioGD\"])\ncat(\"\\014\")\n\n#install.packages(\"shiny\")\n#install.packages(\"leaflet\")\n#install.packages(\"dplyr\")\n\n# Escolher diretório e pasta dos dados\nsetwd(\"database.csv\")\n\n#load libraries\nlibrary(shiny)\nlibrary(leaflet)\nlibrary(dplyr)\nlibrary(leaflet.extras)#import data\ndata <- read.csv(\"database.csv\")\n\n#categorize earthquake depth\ndata$depth_type <- ifelse(data$Depth <= 70, \"shallow\", ifelse(data$Depth <= 300 | data$Depth >70, \"intermediate\", ifelse(data$Depth > 300, \"deep\", \"other\")))\n\n# UI and Server\nui <- fluidPage(\n mainPanel( \n #this will create a space for us to display our map\n leafletOutput(outputId = \"mymap\"), #this allows me to put the checkmarks ontop of the map to allow people to view earthquake depth or overlay a heatmap\n absolutePanel(top = 100, left = 60, \n checkboxInput(\"markers\", \"Depth\", FALSE),\n checkboxInput(\"heat\", \"Heatmap\", FALSE)\n )\n ))\n\nserver <- function(input, output, session) {\n \n #define the color pallate for the magnitidue of the earthquake\n pal <- colorNumeric(\n palette = c('gold', 'orange', 'dark orange', 'orange red', 'red', 'dark red'),\n domain = data$Magnitude)\n \n #define the color of for the depth of the earquakes\n pal2 <- colorFactor(\n palette = c('blue', 'yellow', 'red'),\n domain = data$depth_type\n )\n \n #create the map\n output$mymap <- renderLeaflet({\n leaflet(data) %>% \n setView(lng = -99, lat = 45, zoom = 2) %>% #setting the view over ~ center of North America\n addTiles() %>% \n addCircles(data = data, lat = ~ Latitude, lng = ~ Longitude, weight = 1, radius = ~sqrt(Magnitude)*25000, popup = ~as.character(Magnitude), label = ~as.character(paste0(\"Magnitude: \", sep = \" \", Magnitude)), color = ~pal(Magnitude), fillOpacity = 0.5)\n })\n \n#next we use the observe 
function to make the checkboxes dynamic. \n#If you leave this part out you will see that the checkboxes, when clicked \n#on the first time, display our filters...But if you then uncheck them they stay on. \n#So we need to tell the server to update the map when the checkboxes are unchecked.\n\n observe({\n proxy <- leafletProxy(\"mymap\", data = data)\n proxy %>% clearMarkers()\n if (input$markers) {\n proxy %>% addCircleMarkers(stroke = FALSE, color = ~pal2(depth_type), fillOpacity = 0.2, label = ~as.character(paste0(\"Magnitude: \", sep = \" \", Magnitude))) %>%\n addLegend(\"bottomright\", pal = pal2, values = data$depth_type,\n title = \"Depth Type\",\n opacity = 1)}\n else {\n proxy %>% clearMarkers() %>% clearControls()\n }\n })\n \n observe({\n proxy <- leafletProxy(\"mymap\", data = data)\n proxy %>% clearMarkers()\n if (input$heat) {\n proxy %>% addHeatmap(lng=~Longitude, lat=~Latitude, intensity = ~Magnitude, blur = 10, max = 0.05, radius = 15) \n }\n else{\n proxy %>% clearHeatmap()\n }\n \n })\n \n}\n \nshinyApp(ui, server)\n"
}
] | 4 |
marcosvr17101999/gestionAlmacen | https://github.com/marcosvr17101999/gestionAlmacen | 935e428c7de8379cfb6f8b4aa743ad11047f292c | 30636b40d9273a12346ed9cfe9fbaf58f4c29b47 | 60d69a8b7bce7c51b41afa6fbacaecb0c2220790 | refs/heads/master | 2023-04-25T07:37:23.056102 | 2021-05-23T16:22:40 | 2021-05-23T16:22:40 | 365,781,407 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6577407121658325,
"alphanum_fraction": 0.664834201335907,
"avg_line_length": 39.86231994628906,
"blob_id": "b945bfe8bbb1afbbf56fd29aea35e8d647e98f9d",
"content_id": "6ca1ba4418a15c01d0d743535c0b8d0c369af233",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5639,
"license_type": "no_license",
"max_line_length": 166,
"num_lines": 138,
"path": "/app.py",
"repo_name": "marcosvr17101999/gestionAlmacen",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template,request,url_for,redirect\nfrom flask_sqlalchemy import SQLAlchemy\nfrom sqlalchemy import func\n\napp = Flask(__name__)\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///database/almacen.db'\ndb = SQLAlchemy(app)\n\n#Creacion de las tablas de la base de datos \nclass Rol(db.Model):\n __tablename__ = \"ROL\"\n id = db.Column(db.Integer,primary_key = True)\n rol = db.Column(db.String(200),unique = True)\n\nclass Usuario(db.Model):\n __tablename__ = \"USUARIO\"\n id = db.Column(db.Integer,primary_key = True)\n user = db.Column(db.String(200))\n password = db.Column(db.String(100))\n rol = db.Column(db.Integer,db.ForeignKey(\"ROL.id\"))\n\nclass Proveedor(db.Model):\n __tablename__ = \"PROVEEDOR\"\n id = db.Column(db.Integer,primary_key = True)\n idUsuario = db.Column(db.Integer,db.ForeignKey(\"USUARIO.id\"))\n nombre = db.Column(db.String(200))\n tlfn = db.Column(db.String(30))\n cif = db.Column(db.String(30))\n\nclass Producto(db.Model):\n __tablename__ = \"PRODUCTO\"\n id = db.Column(db.Integer,primary_key = True)\n producto = db.Column(db.String(200))\n color = db.Column(db.String(200))\n cantidad = db.Column(db.Integer)\n descripcion = db.Column(db.String(300))\n precio = db.Column(db.Integer)\n lugar = db.Column(db.String(200))\n proveedor = db.Column(db.Integer,db.ForeignKey(\"PROVEEDOR.id\"))\n cantidadMax = db.Column(db.Integer)\n\nclass CompraCliente(db.Model):\n __tablename__ = \"COMPRACLIENTE\"\n id = db.Column(db.Integer,primary_key = True)\n idProducto = db.Column(db.Integer,db.ForeignKey(\"PRODUCTO.id\"))\n idCliente = db.Column(db.Integer,db.ForeignKey(\"USUARIO.id\"))\n cantidad = db.Column(db.Integer)\n precio = db.Column(db.Integer)\n fecha = db.Column(db.Date)\n\nclass CompraProveedor(db.Model):\n __tablename__ = \"COMPRAPROVEEDOR\"\n id = db.Column(db.Integer,primary_key = True)\n idProducto = 
db.Column(db.Integer,db.ForeignKey(\"PRODUCTO.id\"))\n idProveedor = db.Column(db.Integer,db.ForeignKey(\"PROVEEDOR.id\"))\n cantidad = db.Column(db.Integer)\n precio = db.Column(db.Integer)\n fecha = db.Column(db.Date)\n\n\n\n#Ejecuta la cracion de datos en la base de datos \ndb.create_all()\ndb.session.commit()\n\n\[email protected](\"/\")\ndef home():\n return render_template(\"index.html\")\[email protected](\"/exit\",methods=[\"Post\"])\ndef exit():\n return render_template('index.html')\n\[email protected](\"/back/<id>\",methods=['Post'])\ndef back(id):\n cliente = db.session.query(Usuario).filter_by(id=id).first()\n\n todosProductos = Producto.query.all()\n usuarios = Usuario.query.all()\n proveedores = Proveedor.query.all()\n return render_template(\"admin.html\",listProveedores=proveedores,listUsuarios=usuarios, productos=todosProductos, usuario=cliente)\n \[email protected](\"/verProducto/<id>/<user>\")\ndef verProducto(id,user):\n pro = db.session.query(Producto).filter_by(id=int(id)).first()\n cliente = db.session.query(Usuario).filter_by(id=user).first()\n\n return render_template(\"verProducto.html\",produ=pro,usuario=cliente)\n\[email protected](\"/login\",methods=['Post'])\n#Funcion para logearte\ndef login():\n try:\n usuario = request.form[\"loginUsuario\"]\n passw = request.form[\"loginPassword\"]\n cliente = db.session.query(Usuario).filter_by(user=usuario).first()\n if (cliente.rol == 1 ) and cliente.password == passw:\n todosProductos = Producto.query.all()\n usuarios = Usuario.query.all()\n proveedores = Proveedor.query.all()\n return render_template(\"admin.html\",listProveedores=proveedores,listUsuarios=usuarios, productos=todosProductos, usuario=cliente)\n elif(cliente.rol == 2) and cliente.password == passw:\n id = int(cliente.id)\n compras = db.session.query(CompraCliente).filter_by(idCliente=id).order_by(CompraCliente.fecha.desc()).all() \n suma = 
db.session.query(CompraCliente,func.sum(CompraCliente.precio)).filter_by(idCliente=id).all()\n precioTotal = suma[0][1]\n if(precioTotal==None):\n precioTotal=0\n todosProductos = Producto.query.all()\n return render_template(\"cliente.html\", productos=todosProductos, usuario=cliente,listacompras=compras,precio=precioTotal)\n elif(cliente.rol == 3) and cliente.password == passw:\n prove = db.session.query(Proveedor).filter_by(idUsuario=cliente.id).first()\n compras = db.session.query(CompraProveedor).filter_by(idProveedor=prove.id).order_by(CompraProveedor.fecha.desc()).all() \n suma = db.session.query(CompraProveedor,func.sum(CompraProveedor.precio)).filter_by(idProveedor=prove.id).all()\n cant = db.session.query(CompraProveedor,func.sum(CompraProveedor.cantidad)).filter_by(idProveedor=prove.id).all()\n cantidad = cant[0][1]\n precioTotal = suma[0][1]\n if(precioTotal == None):\n precioTotal = 0\n if(cantidad == None):\n cantidad = 0\n todosProductos = db.session.query(Producto).filter_by(proveedor=prove.id)\n return render_template(\"proveedor.html\", productos=todosProductos, usuario=cliente,provee=prove,listacompras=compras,precio=precioTotal,cantidad=cantidad)\n else:\n return redirect(url_for('home'))\n\n except AttributeError as e:\n print(\"error en los datos\")\n return redirect(url_for('home'))\n except Exception as e:\n print(e)\n return redirect(url_for('home'))\n\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n"
},
{
"alpha_fraction": 0.5454545617103577,
"alphanum_fraction": 0.7272727489471436,
"avg_line_length": 17.66666603088379,
"blob_id": "7e449c06b5738d9621773ef985fbc55a77c52b48",
"content_id": "ddcbac041e0dc2bd324cf6bdb69b6dfc3ab7b658",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 55,
"license_type": "no_license",
"max_line_length": 23,
"num_lines": 3,
"path": "/requirements.txt",
"repo_name": "marcosvr17101999/gestionAlmacen",
"src_encoding": "UTF-8",
"text": "Flask~=1.1.2\nSQLAlchemy~=1.4.14\nflask_sqlalchemy~=2.5.1"
},
{
"alpha_fraction": 0.8194607496261597,
"alphanum_fraction": 0.8311840295791626,
"avg_line_length": 64.69230651855469,
"blob_id": "08a6718af752b07dee74cbec96b1aeacf420b132",
"content_id": "8f33ce7df591d628a32bdd9c03dc9d1e8f8d5a19",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 853,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 13,
"path": "/README.md",
"repo_name": "marcosvr17101999/gestionAlmacen",
"src_encoding": "UTF-8",
"text": "# gestionAlmacen\nProyecto Final\nProyecto Final del curso de Programacion Python en el cual se trabajara con Flask y SQLAlchemy\nLa aplicacion consiste en crear una aplicacion web que ayude a una empresa de suministros informaticos.\nLa aplicacion servira como base de datos y como gestion de datos de la empresa como para los proveedores.\n\nPara poder tener la aplicacion en vuestro sistema podeis usar el siguiente comando en el lugar donde querais almacenarlo:\ngit clone https://github.com/marcosvr17101999/gestionAlmacen.git\n\nCuando tengas ya el proyecto en tu zona de trabajo deberas crear un entorno de trabajo con python 3.9\nAl tener ya tu entorno solo faltaria instalar las librerias que se instalaran automaticamente con el siguiente comando:\npip install -r requirements.txt\nCon esto ya estaria la aplicacion disponible para funcionar correctamente"
}
] | 3 |
dancingfoot/mpv-videowall | https://github.com/dancingfoot/mpv-videowall | 97a8137034c8342d5c1b4a11f627a35de50adda2 | 77f971d9e1689ca5a13db934e53897626df89cb4 | f78f3a269f2401b5200beb479fc7e4d1bdbb36c9 | refs/heads/master | 2020-06-07T21:38:05.466115 | 2017-09-08T10:54:21 | 2017-09-08T10:54:21 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.649350643157959,
"alphanum_fraction": 0.6753246784210205,
"avg_line_length": 37.5,
"blob_id": "80616454c6e63f1fc96392bdca0c0ab55b2ccf82",
"content_id": "8b5d6bf097c41167e50d4e800863629d9599aaeb",
"detected_licenses": [
"LicenseRef-scancode-sata"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 77,
"license_type": "permissive",
"max_line_length": 66,
"num_lines": 2,
"path": "/util/videotime.sh",
"repo_name": "dancingfoot/mpv-videowall",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\nffprobe -i $1 -show_entries format=duration -v quiet -of csv=\"p=0\"\n"
},
{
"alpha_fraction": 0.57201087474823,
"alphanum_fraction": 0.592391312122345,
"avg_line_length": 28.440000534057617,
"blob_id": "2f80e41c3aaa92fe18fbdf2e79c8b026b3fb64c8",
"content_id": "eecac7eb97728dc8d3071a0b925b950f0277a9ff",
"detected_licenses": [
"LicenseRef-scancode-sata"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1472,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 50,
"path": "/master.py",
"repo_name": "dancingfoot/mpv-videowall",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport logging\nimport socket\nimport struct\nimport sys\nimport time\n\n\ndef broadcast_pos(sock: socket.socket, pos: float, clients: [(str, int)]) -> [int]:\n data = struct.pack('!d', pos)\n sent = []\n for client in clients:\n sent.append(sock.sendto(data, client))\n for client, written in zip(clients, sent):\n if written != 8:\n logging.error(\"Cannot send to '%s:%d'\" % client)\n return sent\n\n\ndef main(argv: [str]) -> int:\n logging.basicConfig(level=logging.INFO)\n if len(argv) < 3:\n sys.stdout.write('Usage: ./master.py length client1 port1 [client2 port2 ...]\\n\\n')\n return 1\n logging.info(\"Setting length to '%s'\" % argv[1])\n length = float(argv[1])\n clients = []\n for addr, port in zip(argv[2::2], argv[3::2]):\n logging.info(\"Adding client '%s:%s'\" % (addr, port))\n clients.append((addr, int(port)))\n sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)\n\n while True: # Loop playback\n start_time = time.monotonic()\n pos = 0\n broadcast_pos(sock, 0, clients)\n logging.info('Position: 0.000000000')\n while pos < length:\n time.sleep(1)\n pos = time.monotonic() - start_time\n broadcast_pos(sock, pos, clients)\n logging.info('Position: %.9f' % pos)\n\n return 0\n\n\nif __name__ == '__main__':\n sys.exit(main(sys.argv))\n"
}
] | 2 |
markmavromatis/udacity-full-stack-dev-remindersapp | https://github.com/markmavromatis/udacity-full-stack-dev-remindersapp | b5ba6da1e8ae59f1b248db01780421867064e8b8 | 618328a29f21b256174e6896b9eea6e1e331e900 | 12d87d11c74579fd4930774003853bb67e605729 | refs/heads/master | 2022-08-22T08:06:48.281388 | 2020-05-19T00:44:51 | 2020-05-19T00:44:51 | 265,002,297 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6615039110183716,
"alphanum_fraction": 0.6639730930328369,
"avg_line_length": 26.331289291381836,
"blob_id": "1317c6c028f591393cc0401c8d79de2226d9fc51",
"content_id": "9cf2551029f72d4eae33f4a786cec704c720bf3d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4455,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 163,
"path": "/app.py",
"repo_name": "markmavromatis/udacity-full-stack-dev-remindersapp",
"src_encoding": "UTF-8",
"text": "import sys\n\nfrom flask import abort, Flask, jsonify, render_template, request, redirect, url_for\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_migrate import Migrate\n\napp = Flask(__name__)\napp.config['SQLALCHEMY_DATABASE_URI'] = 'postgres://markmavromatis@localhost:5432/todoapp'\n\n# Disable warning message\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False;\n\ndb = SQLAlchemy(app)\n\nmigrate = Migrate(app, db)\n\n# Setup Todo Reminder class and backing table in Database\nclass Todo(db.Model):\n __tablename__ = 'todos'\n id = db.Column(db.Integer, primary_key=True)\n description = db.Column(db.String(), nullable=False)\n completed = db.Column(db.Boolean, nullable=False, default=False)\n list_id = db.Column(db.Integer, db.ForeignKey('todolists.id'), nullable = False)\n\n # Override display value\n def __repr__(self):\n return f'<Todo {self.id} {self.description}>'\n\nclass TodoList(db.Model):\n __tablename__ = 'todolists'\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(), nullable = False)\n todos = db.relationship('Todo', backref = 'list', lazy = False, cascade =\"all, delete-orphan\")\n\n def __repr__(self):\n return f'<Todo List {self.id} {self.description}'\n\n# Adds a new reminder\[email protected]('/todos/createToDo', methods=['POST'])\ndef create_todo():\n error = False\n body = {}\n try:\n description = request.get_json()['description']\n list_id = request.get_json()['list_id']\n todo = Todo(description=description, list_id=list_id)\n db.session.add(todo)\n db.session.commit()\n body['description'] = todo.description\n except:\n error = True\n db.session.rollback()\n print(sys.exc_info())\n finally:\n db.session.close()\n if error:\n abort (400)\n else:\n return jsonify(body)\n\n# Adds a new reminders list\[email protected]('/todos/createList', methods=['POST'])\ndef create_list():\n error = False\n body = {}\n try:\n name = request.get_json()['name']\n new_list = TodoList(name=name)\n 
db.session.add(new_list)\n db.session.commit()\n body['name'] = new_list.name\n except:\n error = True\n db.session.rollback()\n print(sys.exc_info())\n finally:\n db.session.close()\n if error:\n abort (400)\n else:\n return jsonify(body)\n\n# Update the completed status of a reminder\[email protected]('/todos/<todo_id>/set-completed-todo', methods=['POST'])\ndef set_completed_todo(todo_id):\n try:\n completed = request.get_json()['completed']\n todo = Todo.query.get(todo_id)\n todo.completed = completed\n db.session.commit()\n except:\n error = True\n db.session.rollback()\n print(sys.exc_info())\n finally:\n db.session.close()\n return redirect(url_for('index'))\n\n# Update the completed status of a list\[email protected]('/todos/<list_id>/set-completed-list', methods=['POST'])\ndef set_completed_list(list_id):\n try:\n todos = Todo.query.filter_by(list_id = list_id).all()\n # print(\"Found \" + str(todos.count()) + \" todos!\")\n for each_todo in todos:\n print(\"HI\")\n db.session.add(each_todo)\n print(\"Checking completed status of todo: \" + str(each_todo.id))\n each_todo.completed = True\n db.session.commit()\n except:\n error = True\n db.session.rollback()\n print(sys.exc_info())\n finally:\n db.session.close()\n return redirect(url_for('index'))\n\n# Delete a reminder\[email protected]('/todos/<todo_id>/delete', methods=['DELETE'])\ndef deleteTodo(todo_id):\n try:\n todo = Todo.query.get(todo_id)\n db.session.delete(todo)\n db.session.commit()\n except:\n error = True\n db.session.rollback()\n print(sys.exc_info())\n finally:\n db.session.close()\n return jsonify({\"success\": True})\n\n# Delete a reminder list\[email protected]('/lists/<list_id>/delete', methods=['DELETE'])\ndef deleteList(list_id):\n try:\n todo_list = TodoList.query.get(list_id);\n db.session.delete(todo_list);\n db.session.commit();\n except:\n error = True\n db.session.rollback()\n print(sys.exc_info())\n finally:\n db.session.close()\n return jsonify({\"success\": True})\n\n\n\n\n# 
Get list of Todos for a specific Todos List\[email protected]('/lists/<list_id>')\ndef get_list_todos(list_id):\n lists = TodoList.query.order_by('id').all()\n todos = Todo.query.filter_by(list_id = list_id).order_by('id').all()\n active_list = TodoList.query.get(list_id)\n return render_template('index.html', todos = todos, lists = lists, active_list = active_list)\n\n# Front Page\[email protected]('/')\ndef index():\n return redirect(url_for('get_list_todos', list_id=1))\n"
},
{
"alpha_fraction": 0.7971222996711731,
"alphanum_fraction": 0.8129496574401855,
"avg_line_length": 62.181819915771484,
"blob_id": "938b33c75c40a71667cf8c6ff9cdfa6b118c8408",
"content_id": "e32e2c1bc74d6ff6ca166aa05142a5ba8f793b85",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 695,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 11,
"path": "/README.md",
"repo_name": "markmavromatis/udacity-full-stack-dev-remindersapp",
"src_encoding": "UTF-8",
"text": "# udacity-full-stack-dev-remindersapp\nWeb application for adding/updating/removing/completing reminders\n\nThis is a Python/Flask/SqlAlchemy/Postgresql project to build a web-based reminders application. The project is part of the Udacity Full Stack Developer Nanodegree program as of May 2020. \n\nInstructions for Setup:\n1. Install Postgres\n1. Install Python 3\n1. Install necessary Python libraries using Pip: Flask, Flask-Migrate, Flask-SQLAlchemy, Psychopg2, SQLAlchemy\n1. Create an empty database in Postgres called 'todoapp'.\n1. Update the app.py script connection details to point to your Postgres database username. The host and port number are set to defaults so update them too if needed.\n"
},
{
"alpha_fraction": 0.6071784496307373,
"alphanum_fraction": 0.6390827298164368,
"avg_line_length": 26.86111068725586,
"blob_id": "d80f637ad77651f48873542f0a57e8face9438b4",
"content_id": "92814f467e515b941f1e5dbe3f923ca46d926391",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1003,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 36,
"path": "/migrations/versions/cc1869b1bfe0_.py",
"repo_name": "markmavromatis/udacity-full-stack-dev-remindersapp",
"src_encoding": "UTF-8",
"text": "\"\"\"empty message\n\nRevision ID: cc1869b1bfe0\nRevises: fcb94d9cb42d\nCreate Date: 2020-05-18 17:19:06.761982\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'cc1869b1bfe0'\ndown_revision = 'fcb94d9cb42d'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n # Before making list_id required, setup the 'Uncategorized' list\n # Assign all Todo items to this new list\n op.execute('INSERT INTO todolists (id, name) VALUES (1, \\'Uncategorized\\');')\n op.execute('UPDATE todos SET list_id = 1;')\n op.alter_column('todos', 'list_id',\n existing_type=sa.INTEGER(),\n nullable=False)\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.alter_column('todos', 'list_id',\n existing_type=sa.INTEGER(),\n nullable=True)\n # ### end Alembic commands ###\n"
}
] | 3 |
tsfkingsport/Thinkful-Folder | https://github.com/tsfkingsport/Thinkful-Folder | 2f3564ef1af4062ed9c5acc49adea83e5a014023 | 32cc6201b058bd1482b100f621cc4981685c9741 | 53353ca015a35de453d25e017a5d91031658b5c9 | refs/heads/master | 2020-04-27T15:24:46.864413 | 2019-11-18T23:13:44 | 2019-11-18T23:13:44 | 174,445,204 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5368421077728271,
"alphanum_fraction": 0.5736842155456543,
"avg_line_length": 26.600000381469727,
"blob_id": "9cc2fb2ea4b4e4f3cc80c651cb1c8c07db3ca003",
"content_id": "ad3d59b8ba396495c8e45c95e2016eceac926e5a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 570,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 20,
"path": "/Module 6-10 Problem 5.py",
"repo_name": "tsfkingsport/Thinkful-Folder",
"src_encoding": "UTF-8",
"text": "def categorize_study(p_value, requirements):\r\n bs_factor = 1\r\n if requirements >= 6:\r\n bs_factor = 1\r\n elif requirements == 5:\r\n bs_factor = 2\r\n elif requirements == 4:\r\n bs_factor = 4\r\n elif requirements <= 3:\r\n bs_factor = 8\r\n elif p_value * bs_factor < .05:\r\n return 'Fine'\r\n elif p_value * bs_factor >= .05 and p_value * bs_factor <= .15:\r\n return 'Needs review'\r\n elif p_value * bs_factor > .15:\r\n return 'Pants on fire'\r\n elif requirements == 0:\r\n return 'Needs review'\r\n\r\nprint(categorize_study(.01,3))"
}
] | 1 |
pawelgalka/bubbleshooter | https://github.com/pawelgalka/bubbleshooter | 898995a1fa27bbbd8e3885ef429dce774c7458f8 | fa2893eeebc7750d652fc25a7115ff774e342867 | 3898515c2c112559e75b26beab0de3f98f604529 | refs/heads/master | 2020-03-26T17:13:50.504907 | 2018-08-18T21:26:18 | 2018-08-18T21:26:18 | 145,148,946 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6289381384849548,
"alphanum_fraction": 0.63710618019104,
"avg_line_length": 28.482759475708008,
"blob_id": "c73551035be1682893d67e97d147144bae69be02",
"content_id": "08eab6e573f1e9015de30818c5a325d99d02e4c0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 857,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 29,
"path": "/arrow.py",
"repo_name": "pawelgalka/bubbleshooter",
"src_encoding": "UTF-8",
"text": "# coding=utf-8\n\n#file for class of bubble and it's events\n#Paweł Gałka 11.08\n\n\nfrom settings import *\n\nclass Arrow(pygame.sprite.Sprite):\n def __init__(self):\n pygame.sprite.Sprite.__init__(self)\n self.angle = 90\n arrowImage = pygame.image.load('arrow.png')\n arrowImage.convert_alpha()\n arrowRect = arrowImage.get_rect()\n self.image = arrowImage\n self.transImage = self.image\n self.rect = arrowRect\n self.rect.centerx = STARTX\n self.rect.centery = STARTY\n\n def draw(self):\n display.blit(self.transImage, self.rect)\n\n def update(self, angle, vector):\n self.transImage = pygame.transform.rotate(self.image, -self.angle+angle)\n self.rect = self.transImage.get_rect(center=self.rect.midbottom)\n self.rect.centerx = STARTX\n self.rect.centery = STARTY\n\n\n"
},
{
"alpha_fraction": 0.49080193042755127,
"alphanum_fraction": 0.5036613941192627,
"avg_line_length": 33.45846176147461,
"blob_id": "27cd30927eb0ecb4858822c8632bf01a54282bec",
"content_id": "a96c3969b6dc3105bec55531673e11288c207cd8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11198,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 325,
"path": "/board.py",
"repo_name": "pawelgalka/bubbleshooter",
"src_encoding": "UTF-8",
"text": "# coding=utf-8\n\nfrom settings import *\nfrom bubble import *\n\ndef CreateEmptyBoard():\n board = []\n\n for i in range (ROWS):\n col = []\n for j in range (COLS):\n col.append(EMPTY)\n board.append(col)\n\n return board\n\n\ndef FillBoard(board, colorlist):\n for i in range(STARTLAYERS):\n for j in range(len(board[i])):\n random.shuffle(colorlist)\n newBubble = Bubble(colorlist[0])\n board[i][j] = newBubble\n setPosition(board)\n\n\ndef setPosition(board):\n #set them in array\n for row in range(ROWS):\n for col in range(len(board[row])):\n if board[row][col]!=EMPTY:\n board[row][col].rect.x = (BALLSIZE*col)+5*WIDTH/640\n board[row][col].rect.y = (BALLSIZE*row)+5*HEIGHT/480\n #print(row,col,board[row][col].rect.x,board[row][col].rect.y)\n\n #make pattern - move odd rows\n for row in range (1, ROWS, 2):\n for col in range(len(board[row])):\n if board[row][col]!=EMPTY:\n board[row][col].rect.x += BALLRADIUS\n\n #delete empty space between balls\n for row in range(1, ROWS):\n for col in range(len(board[row])):\n if board[row][col]!=EMPTY:\n board[row][col].rect.y -= row*5*HEIGHT/480\n\n deleteExtraBalls(board)\n\n\ndef deleteExtraBalls(board):\n for row in range(ROWS):\n for col in range(len(board[row])):\n if board[row][col] != EMPTY:\n if board[row][col].rect.right > WIDTH:\n board[row][col] = EMPTY\n\ndef drawBoard(board):\n for i in range(len(board)):\n for j in range(len(board[i])):\n if board[i][j] != EMPTY:\n board[i][j].draw()\n\n\ndef getVector():\n mousePos = pygame.mouse.get_pos()\n vector = pygame.math.Vector2((mousePos[0] - STARTX, STARTY - mousePos[1]))\n if vector.x == 0 :\n return vector, 90\n if vector.y < 0 and vector.x < 0:\n return vector, 179\n if vector.y < 0 and vector.x > 0:\n return vector, 1\n angle = math.degrees(math.atan(vector.y / vector.x))\n if angle < 0:\n angle += 180\n #print(angle)\n return vector, angle\n\n\ndef getBubble(colors):\n random.shuffle(colors)\n return Bubble(colors[0], x=WIDTH-BALLSIZE-10)\n\n\ndef 
stopBubble(board, ball):\n for row in range(len(board)):\n for col in range(len(board[row])):\n # print(row,col)\n if (board[row][col] != EMPTY and ball != None):\n # print(ball.rect.top)\n if (pygame.sprite.collide_rect(ball, board[row][col])) or ball.rect.top <= 0:\n # print(pygame.sprite.collide_rect(ball, board[row][col]))\n\n if ball.rect.top <= 0:\n newCol, newRow = addToTop(ball, board)\n board[newRow][newCol] = copy.copy(ball)\n board[newRow][newCol].row = newRow\n board[newRow][newCol].col = newCol\n # print(newRow,newCol)\n\n\n\n elif ball.rect.centery>=board[row][col].rect.centery: #hitting under ball\n # print('pod',row,col)\n if ball.rect.centerx<board[row][col].rect.centerx: #LD corner\n\n if row%2==0: #longer line\n newRow = row + 1\n newCol = col - 1\n\n else: #shorter line\n newRow = row + 1\n newCol = col\n\n else: #RD corner\n if row%2==0: #longer line\n newRow = row + 1\n newCol = col\n\n else: #shorter line\n newRow = row + 1\n newCol = col + 1\n\n board[newRow][newCol] = copy.copy(ball)\n # print(board[newRow][newCol] is EMPTY)\n board[newRow][newCol].row = newRow\n board[newRow][newCol].col = newCol\n\n\n else: # hitting over ball\n # print('nad',row,col)\n if row == 0:\n # pass\n newCol, newRow = addToTop(ball, board)\n elif ball.rect.centerx < board[row][col].rect.centerx: # LU corner\n\n\n if row % 2 == 0: # longer line\n newRow = row - 1\n newCol = col - 1\n if board[newRow][newCol] is not EMPTY:\n newRow += 1\n\n else: # shorter line\n newRow = row - 1\n newCol = col\n if board[newRow][newCol] is not EMPTY:\n newRow += 1\n newCol -= 1\n\n else: # RU corner\n if row % 2 == 0: # longer line\n newRow = row - 1\n newCol = col\n if board[newRow][newCol] is not EMPTY:\n newRow += 1\n newCol += 1\n\n\n else: # shorter line\n newRow = row - 1\n newCol = col + 1\n if board[newRow][newCol] is not EMPTY:\n newRow += 1\n\n # print(newRow, newCol)\n board[newRow][newCol] = copy.copy(ball)\n board[newRow][newCol].row = newRow\n 
board[newRow][newCol].col = newCol\n\n deleteList = []\n deleteBubbles(board, newRow, newCol, ball.color, deleteList)\n if len(deleteList)>=3:\n popBubbles(board,deleteList)\n print(deleteList)\n\n deleteFloaters(board)\n ball = None\n setPosition(board)\n # updateColors(board,COLORS)\n print(COLORS)\n checkwin = checkWin(board)\n return ball, board, checkwin\n\n\n\ndef deleteFloaters(board):\n filledFirst = []\n pattern = [i for i in range(16)]\n\n for col in range(len(board[0])):\n if board[0][col]!=EMPTY:\n filledFirst.append(col)\n\n\n unfilledList = diff(filledFirst,pattern)\n unfilledList.insert(0,0)\n print(unfilledList)\n copyBoard = copy.deepcopy(board)\n for row in range (len(board)):\n for col in range(len(board[row])):\n board[row][col]=EMPTY\n print(board)\n for col in unfilledList:\n checkFloaters(board,copyBoard,0,col)\n\n\ndef checkFloaters(board, copyBoard, row, col):\n\n if row<0 or row>len(board)-1 or col<0 or col>len(board[row])-1:\n print(1)\n return\n\n elif copyBoard[row][col]==EMPTY:\n # print(row, col, board[row][col], copyBoard[row][col], end=' ')\n print(2)\n return\n\n elif board[row][col] == copyBoard[row][col]:\n # print(row, col, board[row][col], copyBoard[row][col], end=' ')\n print(3)\n return\n\n board[row][col] = copyBoard[row][col]\n\n if row%2 == 0: #check LU,RU,L,R,LD,RD\n if row!=0:\n checkFloaters(board, copyBoard, row - 1, col - 1) # left up\n checkFloaters(board, copyBoard, row - 1, col) # right up\n\n checkFloaters(board, copyBoard, row, col - 1) # left\n checkFloaters(board, copyBoard, row, col + 1) # right\n checkFloaters(board, copyBoard, row + 1, col - 1) # left down\n checkFloaters(board, copyBoard, row + 1, col) # right down\n\n else:\n checkFloaters(board, copyBoard, row - 1, col) # left up\n checkFloaters(board, copyBoard, row - 1, col + 1) # right up\n checkFloaters(board, copyBoard, row, col - 1) # left\n checkFloaters(board, copyBoard, row, col + 1) # right\n checkFloaters(board, copyBoard, row + 1, col) # left 
down\n checkFloaters(board, copyBoard, row + 1, col + 1) # right down\n\n\ndef addToTop(ball, board):\n newRow = 0\n x = ball.rect.centerx\n\n newCol = math.floor(x*COLS/WIDTH)\n # newCol = ((x + 5) * COLS) // WIDTH\n # if (board[newRow][newCol] is not EMPTY):\n # if ball.rect.right <= board[newRow][newCol].rect.left:\n # newCol -= 1\n # else:\n # newCol += 1\n return newCol, newRow\n\n\ndef deleteBubbles(board, row, col, color, deleteList):\n # print(\"wejscie\")\n if row < 0 or row > len(board)-1 or col < 0 or col > len(board[row])-1: # out of range\n return\n\n if board[row][col] is EMPTY: # field is empty\n return\n\n if board[row][col].color != color: # not right color\n return\n\n for ball in deleteList: # check if field is not already on list to delete\n if ball[0]==row and ball[1]==col:\n return\n\n deleteList.append((row,col))\n\n\n # if row == 0: #check L,R,LD,RD\n # deleteBubbles(board, row, col - 1, color, deleteList) # left\n # deleteBubbles(board, row, col + 1, color, deleteList) # right\n # deleteBubbles(board, row + 1, col - 1, color, deleteList) # left down\n # deleteBubbles(board, row + 1, col, color, deleteList) # right down\n\n if row%2 == 0: #check LU,RU,L,R,LD,RD\n if row!=0:\n deleteBubbles(board, row - 1, col - 1, color, deleteList) # left up\n deleteBubbles(board, row - 1, col, color, deleteList) # right up\n\n deleteBubbles(board, row, col - 1, color, deleteList) # left\n deleteBubbles(board, row, col + 1, color, deleteList) # right\n deleteBubbles(board, row + 1, col - 1, color, deleteList) # left down\n deleteBubbles(board, row + 1, col, color, deleteList) # right down\n\n else:\n deleteBubbles(board, row - 1, col, color, deleteList) # left up\n deleteBubbles(board, row - 1, col + 1, color, deleteList) # right up\n deleteBubbles(board, row, col - 1, color, deleteList) # left\n deleteBubbles(board, row, col + 1, color, deleteList) # right\n deleteBubbles(board, row + 1, col, color, deleteList) # left down\n deleteBubbles(board, row + 1, 
col + 1, color, deleteList) # right down\n\n# def updateColors(board, colorList):\n# colorList.clear()\n# for row in range (len(board)):\n# for col in range(len(board[row])):\n# if board[row][col]!=EMPTY and board[row][col].color not in colorList :\n# colorList.append(color)\n\ndef popBubbles(board, deleteList):\n pygame.time.delay(40)\n for bubble in deleteList:\n board[bubble[0]][bubble[1]] = EMPTY\n\ndef checkWin(board):\n for row in range (len(board)):\n for col in range(len(board[row])):\n if board[row][col]!=EMPTY:\n return False\n return True\n\ndef checkBottom(board):\n for col in range (len(board[10])):\n if board[10][col]!=EMPTY:\n return False\n return True"
},
{
"alpha_fraction": 0.5804877877235413,
"alphanum_fraction": 0.671219527721405,
"avg_line_length": 18.730770111083984,
"blob_id": "95b08d84ac148f6234215aa30163a2d4ee5a274e",
"content_id": "af587fc3817d1cf4d584065eeaf2eb31088a5d70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1025,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 52,
"path": "/settings.py",
"repo_name": "pawelgalka/bubbleshooter",
"src_encoding": "UTF-8",
"text": "# coding=utf-8\n\n#Paweł Gałka 11.08\nimport sys\nimport pygame as pygame\nimport pygame.gfxdraw\nimport random, math, time, copy\nfrom pygame.locals import *\nrandom.seed()\n\n#colors\nRED = (255, 0, 0)\nGREEN = (0, 255, 0)\nBLUE = (0, 0, 255)\nORANGE = (255, 128, 0)\nYELLOW = (255, 255, 0)\nPURPLE = (102, 0, 101)\nNAVY = (13, 200, 255)\nWHITE = (255, 255, 255)\nBLACK = (0, 0, 0)\nBEIGE = (229, 255, 204)\n\nCOLORS = [RED, GREEN, BLUE, ORANGE, YELLOW, PURPLE, NAVY]\nBGCOLOR = BEIGE\n\n#game settings\n\nFPS = 120\nWIDTH = 640\nHEIGHT = 480\nTEXT = 20\nBALLRADIUS = WIDTH//32\nBALLSIZE = 2*BALLRADIUS\nBALLHEIGHT = 9\nSTARTX = WIDTH/2\nSTARTY = HEIGHT - BALLSIZE\nROWS = 14\nCOLS = 16\nEMPTY = 0\nFULL = 1\nSTARTLAYERS = 5\n\nglobal display\ndisplay = pygame.display.set_mode((WIDTH, HEIGHT)) # tuple width,height\npygame.display.set_caption(\"BUBBLE SHOOTER\") # change title of window\ndisplay.convert()\n\n# help functions not related to game\n\ndef diff(first, second):\n second = set(second)\n return [item for item in first if item not in second]"
},
{
"alpha_fraction": 0.6048780679702759,
"alphanum_fraction": 0.6121951341629028,
"avg_line_length": 33.25,
"blob_id": "d8f49698e96754a04e32631f52b43fc1b7006536",
"content_id": "26153c99c9f7b582c0127e42a3bade6d7a0e320f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 410,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 12,
"path": "/setup.py",
"repo_name": "pawelgalka/bubbleshooter",
"src_encoding": "UTF-8",
"text": "import cx_Freeze #for now, do the wildcard import, though the bigger the script gets, I would recommend an as ... structure\n\nexecutables = [cx_Freeze.Executable(\"main.py\")]\n\ncx_Freeze.setup(\n name=\"Bubble shooter\",\n author = \"Pawel Galka\",\n options={\"build_exe\": {\"packages\":[\"pygame\"],\n \"include_files\":[\"arrow.png\"]}},\n executables = executables,\n version = \"1.0.0\"\n )"
},
{
"alpha_fraction": 0.5132052898406982,
"alphanum_fraction": 0.5198079347610474,
"avg_line_length": 19.329267501831055,
"blob_id": "b30dd3e4278f5af2e064414fb5569b3237d39bb7",
"content_id": "0c49d11bc841402c4cc1869ab6d8ab8c112f2c2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1666,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 82,
"path": "/main.py",
"repo_name": "pawelgalka/bubbleshooter",
"src_encoding": "UTF-8",
"text": "# coding=utf-8\n\n#Paweł Gałka 11.08\n\nfrom settings import *\nfrom bubble import *\nfrom arrow import Arrow\nfrom board import *\n\n\ndef main():\n pygame.init()\n clock = pygame.time.Clock()\n board = CreateEmptyBoard()\n\n FillBoard(board,COLORS)\n launchBall = False\n ball = getBubble(COLORS)\n ball.rect.centerx = STARTX\n nextBall = getBubble(COLORS)\n # board[0][15] = copy.deepcopy(ball)\n # setPosition(board)\n arrow = Arrow()\n\n while 1: # main game loop\n display.fill(BEIGE)\n vector, angle = getVector()\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n\n if event.type == KEYDOWN:\n if event.key == K_ESCAPE:\n pygame.quit()\n sys.exit()\n\n\n\n if event.type == MOUSEBUTTONDOWN:\n if not launchBall:\n ball.shoot(angle)\n\n launchBall = True\n\n\n if event.type == MOUSEMOTION:\n arrow.update(angle, vector)\n\n\n\n drawBoard(board)\n nextBall.draw()\n\n if ball is not None:\n\n ball.update()\n ball.draw()\n #print(ball.rect.centerx, ball.rect.centery)\n ball, board, checkwin = stopBubble(board, ball)\n\n\n else:\n launchBall = False\n ball = Bubble(nextBall.color)\n nextBall = getBubble(COLORS)\n\n arrow.draw()\n\n\n if checkwin:\n return 1\n elif checkBottom(board)==False:\n return 2\n pygame.display.update()\n clock.tick(FPS)\n\n\n\n\nif __name__=='__main__':\n main()"
},
{
"alpha_fraction": 0.5896444916725159,
"alphanum_fraction": 0.6112828254699707,
"avg_line_length": 29,
"blob_id": "be9c0cbbfc06137e6bc8420ae285750e5e5b3df0",
"content_id": "d3132f66d3f2938c7f0d797e2cb0b9924fbc7660",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1294,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 43,
"path": "/bubble.py",
"repo_name": "pawelgalka/bubbleshooter",
"src_encoding": "UTF-8",
"text": "# coding=utf-8\n\n#file for class of bubble and it's events\n#Paweł Gałka 11.08\n\n\nfrom settings import *\n\n\nclass Bubble(pygame.sprite.Sprite):\n def __init__(self, color, row=0, column=0, x=STARTX, y=STARTY):\n pygame.sprite.Sprite.__init__(self)\n self.rect = pygame.Rect(0,0,30,30) #30 because balls have grid\n self.rect.centerx = x\n self.rect.centery = y\n self.radius = BALLRADIUS\n self.color = color\n self.row = row\n self.column = column\n self.speed = 0\n self.angle = 0\n\n def update(self, *args):\n xmove = math.cos(math.radians(self.angle))*self.speed\n ymove = -math.sin(math.radians(self.angle))*self.speed\n self.rect.centerx += xmove\n self.rect.centery += ymove\n\n if self.rect.left<0 or self.rect.right>WIDTH:\n self.angle = 180-self.angle\n\n if self.rect.top<0 or self.rect.bottom>HEIGHT:\n self.angle = 180-self.angle\n self.speed *= -1\n\n def draw(self):\n pygame.gfxdraw.filled_circle(display, self.rect.centerx, self.rect.centery, self.radius, self.color)\n pygame.gfxdraw.aacircle(display,self.rect.centerx, self.rect.centery, self.radius, BLACK)\n\n\n def shoot(self, angle):\n self.angle = angle\n self.speed = 10\n\n\n\n\n"
}
] | 6 |
gokulab/sparrow | https://github.com/gokulab/sparrow | 5e4d4758d594d59dbeaa692e89c26057c27333be | 13b950666af00a29c5dd371c81b783d15f6a8af0 | 3169b6a4973df9713e334b3b922e6b4bff40f37a | refs/heads/master | 2016-08-03T08:43:15.485084 | 2015-08-17T15:14:11 | 2015-08-17T15:14:11 | 37,471,334 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6856390237808228,
"alphanum_fraction": 0.686429500579834,
"avg_line_length": 30.625,
"blob_id": "ebf2e02335428da9981e271ef4ebc8e70915f6f2",
"content_id": "1a61c5140bff7f682fb794ac1b27a59a784aa946",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3795,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 120,
"path": "/src/sparrow/tcp_server.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#include <assert.h>\n\n#include <sparrow/tcp_server.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/tcp_connection.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/entity_init.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(TcpServer)\n\nbool TcpServer::OnInit(const VariantList& args) {\n conn_cb_ = nullptr;\n conn_cb_ud_ = nullptr;\n listen_handle_ = nullptr;\n TcpServerInit cap;\n if (cap.Deserialize(args)) { return false; }\n do {\n struct sockaddr_in bind_addr;\n int result = uv_ip4_addr(cap.listen_addr.c_str(), cap.listen_port, &bind_addr);\n if (result) {\n Trace(TraceLevel::Error, \"uv_ip4_addr error: %d\", result);\n break;\n }\n listen_handle_ = new uv_tcp_t();\n uv_tcp_init(uv_default_loop(), listen_handle_);\n listen_handle_->data = this;\n result = uv_tcp_bind(listen_handle_, (sockaddr*)&bind_addr, 0);\n if (result) { 
\n Trace(TraceLevel::Error, \"uv_tcp_bind failed: %d\", result);\n break; \n }\n result = uv_listen((uv_stream_t*)listen_handle_, SOMAXCONN, OnConnection);\n if (result) { \n Trace(TraceLevel::Error, \"uv_listen failed: %d\", result);\n break; \n }\n return true;\n } while (false);\n if (listen_handle_) {\n uv_close((uv_handle_t*)listen_handle_, &TcpServer::OnListenClosed);\n }\n return false;\n}\n\n\nvoid TcpServer::OnUninit() {\n if (listen_handle_) {\n listen_handle_->data = nullptr;\n uv_close((uv_handle_t*)listen_handle_, &TcpServer::OnListenClosed);\n }\n}\n\n\nvoid TcpServer::OnListenClosed(uv_handle_t* handle) {\n auto self = (TcpServer*)handle->data;\n if (self) { self->listen_handle_ = nullptr; }\n delete handle;\n}\n\n\nvoid TcpServer::OnClientClosed(uv_handle_t* handle) {\n delete handle;\n}\n\n\nvoid TcpServer::OnConnection(uv_stream_t* server, int status) {\n if (status) {\n Trace(TraceLevel::Error, \"accept connect error: %s\", uv_err_name(status));\n return;\n }\n auto self = (TcpServer*)server->data;\n if (!self) {\n Trace(TraceLevel::Info, \"ignore peer connection(server has exit)\");\n return;\n }\n\n uv_tcp_t *handle = new uv_tcp_t();\n uv_tcp_init(uv_default_loop(), handle);\n int result;\n result = uv_accept((uv_stream_t*)self->listen_handle_, (uv_stream_t*)handle);\n if (result) {\n Trace(TraceLevel::Error, \"uv_accept failed: %d\", result);\n delete handle; // just delete, no need to call uv_close\n return;\n }\n connection_id_t connid = GetConnectionPool()->AddConnection(handle);\n if (INVALID_CONNECTION_ID == connid) {\n Trace(TraceLevel::Error, \"AddConnection failed\");\n uv_close((uv_handle_t*)handle, &TcpServer::OnClientClosed);\n return;\n }\n if (self->conn_cb_) {\n self->conn_cb_(self->conn_cb_ud_, connid);\n }\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6916466355323792,
"alphanum_fraction": 0.6947404742240906,
"avg_line_length": 28.09000015258789,
"blob_id": "c85fa28e5846513ae052bfcab33b89cc673a29e8",
"content_id": "fec69228a39bbc6a1615d104bf46298d723214cc",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2911,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 100,
"path": "/include/sparrow/event_proto.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_EVENT_PROTO_H\n#define SPARROW_EVENT_PROTO_H\n\n#include <string>\n#include <assert.h>\n\n#include \"sparrow_define.h\"\n#include \"i_serialize.h\"\n#include \"proto/cluster.h\"\n\nNS_SPARROW_BEG\n\nstruct EventHarborOnline : public ISerialize {\n node_id_t node_id;\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(node_id);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n istream.GetValue(0, node_id);\n return 0;\n }\n};\n\n\ntypedef EventHarborOnline EventHarborOffline;\n\n\nstruct EventHarborCommand : public ISerialize {\n node_id_t source;\n VariantList args;\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(source);\n const size_t sz = args.GetByteSize();\n ostream.AddValue(sz);\n if (sz) 
{\n std::unique_ptr<char[]> data(new char[sz]);\n const int ret = args.Serialize(data.get(), sz);\n assert(!ret);\n ostream.AddValue(data.get(), sz);\n }\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n istream.GetValue(0, source);\n size_t sz = 0;\n istream.GetValue(1, sz);\n if (sz) {\n block_t block = istream.GetValue<block_t>(2);\n const int ret = args.Deserialize(block.data, block.sz);\n assert(!ret);\n }\n return 0;\n }\n};\n\n\ninline int ExtractHarborCommand(const VariantList& args,\n node_id_t& source,\n HarborCommand& object) {\n EventHarborCommand e_harbor_command;\n int ret = e_harbor_command.Deserialize(args);\n if (ret) { return ret; }\n ret = object.Deserialize(e_harbor_command.args);\n if (!ret) {\n source = e_harbor_command.source;\n }\n return ret;\n}\n\nNS_SPARROW_END\n\n#endif // SPARROW_EVENT_PROTO_H\n"
},
{
"alpha_fraction": 0.6366355419158936,
"alphanum_fraction": 0.6394453048706055,
"avg_line_length": 31.072673797607422,
"blob_id": "5b629d869434d36eb11392366fd0ffe147e13d18",
"content_id": "8843c08f3e728da103e74ca79a8eafc975da8390",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 11155,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 344,
"path": "/test_modules/chat_module/chat_backend_logic.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <assert.h>\n#include <sparrow/utils/string_utils.h>\n\n#include \"chat_backend_logic.h\"\n\nChatBackendLogic::ChatBackendLogic() {\n handlers_[CC_HELP] = &ChatBackendLogic::HandleHelp;\n handlers_[CC_SETNICK] = &ChatBackendLogic::HandleSetNick;\n handlers_[CC_CREATEROOM] = &ChatBackendLogic::HandleCreateRoom;\n handlers_[CC_LEAVEROOM] = &ChatBackendLogic::HandleLeaveRoom;\n handlers_[CC_ROOMMSG] = &ChatBackendLogic::HandleSendRoomMsg;\n handlers_[CC_WHSPMSG] = &ChatBackendLogic::HandleSendWhspMsg;\n handlers_[CC_WORLDMSG] = &ChatBackendLogic::HandleSendWorldMsg;\n handlers_[CC_LISTROOM] = &ChatBackendLogic::HandleListRoom;\n}\n\n\nChatBackendLogic::~ChatBackendLogic() {\n}\n\n\nvoid ChatBackendLogic::OnPlayerEnter(IKernel* kernel, IClient* player) {\n kernel_ = kernel;\n auto iter = 
chat_users_.find(player);\n assert(iter == chat_users_.end());\n ChatUserPtr chat_user = std::make_shared<ChatUser>(player);\n chat_users_.insert(std::make_pair(player, chat_user));\n // 登记到中心服\n ChatRpcAddPlayer rpc_add_player;\n rpc_add_player.cuuid = player->GetClientUuid();\n SheduleChatRpc(kernel, rpc_add_player);\n}\n\n\nvoid ChatBackendLogic::OnPlayerExit(IKernel* kernel, IClient* player) {\n auto iter = chat_users_.find(player);\n assert(iter != chat_users_.end());\n chat_users_.erase(iter);\n // 从中心服上删除\n ChatRpcRemovePlayer rpc_rm_player;\n rpc_rm_player.cuuid = player->GetClientUuid();\n SheduleChatRpc(kernel, rpc_rm_player);\n}\n\n\nvoid ChatBackendLogic::OnPlayerMessage(IKernel* kernel, IClient* player,\n const void* message, size_t sz) {\n ChatUserPtr chat_user = GetChatUser(player);\n assert(chat_user != nullptr);\n goku::VariantList var;\n int ret = var.Deserialize(message, sz);\n assert(!ret);\n ChatCommand chat_command;\n ret = chat_command.Deserialize(var);\n assert(!ret);\n std::vector<std::string> args = StringUtils::Split(chat_command.command_line, \" \");\n if (args.empty()) {\n chat_user->Send(\"command error\");\n return;\n }\n const std::string command = args[0];\n if (command != CC_HELP && command != CC_SETNICK\n && chat_user->GetNickName().empty()) {\n chat_user->Send(\"You should set a nick name first.\");\n return;\n }\n auto cmd_iter = handlers_.find(command);\n if (cmd_iter == handlers_.end()) {\n chat_user->Send(\"command error\");\n return;\n }\n (this->*(cmd_iter->second))(kernel, player, args);\n}\n\n\nvoid ChatBackendLogic::HandleHelp(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command) {\n ChatUserPtr chat_user = GetChatUser(player);\n chat_user->Send(\"Fuck, can you read the code?\");\n}\n\n\nvoid ChatBackendLogic::HandleSetNick(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command) {\n ChatUserPtr chat_user = GetChatUser(player);\n if (command.size() <= 1) {\n 
chat_user->Send(\"command error\");\n return;\n }\n // 由于昵称是全局唯一的,所以转给中心服处理\n const std::string nick_name = command[1];\n ChatRpcSetNickName rpc_set_name;\n rpc_set_name.cuuid = player->GetClientUuid();\n rpc_set_name.nick_name = nick_name;\n SheduleChatRpc(kernel, rpc_set_name);\n}\n\n\nvoid ChatBackendLogic::HandleCreateRoom(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command) {\n ChatUserPtr chat_user = GetChatUser(player);\n if (command.size() <= 1) {\n chat_user->Send(\"command error\");\n return;\n }\n std::string room_name = command[1];\n ChatRoom *chat_room = GetRoom(room_name);\n if (!chat_room) {\n chat_room = CreateRoom(room_name);\n }\n chat_room->AddUser(chat_user);\n char message[512] = { 0 };\n sprintf(message, \"%s enter room %s\", chat_user->GetNickName().c_str(),\n room_name.c_str());\n chat_room->Send(message);\n}\n\n\nvoid ChatBackendLogic::HandleLeaveRoom(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command) {\n ChatUserPtr chat_user = GetChatUser(player);\n if (command.size() <= 1) {\n chat_user->Send(\"command error\");\n return;\n }\n const std::string room_name = command[1];\n auto iter = chat_rooms_.find(room_name);\n if (iter == chat_rooms_.end()) {\n chat_user->Send(\"room not exist\");\n return;\n }\n if (!iter->second->IsUserExist(chat_user)) {\n chat_user->Send(\"You have not enter the target room\");\n return;\n }\n iter->second->RemoveUser(chat_user);\n const size_t user_count = iter->second->GetUserCount();\n if (!user_count) {\n chat_rooms_.erase(iter);\n chat_user->Send(\"the room has been destroyed successfully\");\n }\n}\n\n\nvoid ChatBackendLogic::HandleSendRoomMsg(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command) {\n ChatUserPtr chat_user = GetChatUser(player);\n if (command.size() <= 2) {\n chat_user->Send(\"command error\");\n return;\n }\n const std::string room_name = command[1];\n const std::string content = command[2];\n ChatRoom* chat_room 
= GetRoom(room_name);\n if (!chat_room) {\n chat_user->Send(\"room not exist\");\n return;\n }\n if (!chat_room->IsUserExist(chat_user)) {\n chat_user->Send(\"You aren't in the room\");\n return;\n }\n char message[512];\n const std::string nick_name = chat_user->GetNickName();\n sprintf(message, \"%s say: %s\", nick_name.c_str(), content.c_str());\n chat_room->Send(message);\n}\n\n\nvoid ChatBackendLogic::HandleSendWhspMsg(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command) {\n ChatUserPtr chat_user = GetChatUser(player);\n if (command.size() <= 2) {\n chat_user->Send(\"command error\");\n return;\n }\n const std::string target_name = command[1];\n const std::string content = command[2];\n // 私聊消息可以跨服,也交给中心服处理\n ChatRpcWhisperMessage rpc_whisp;\n rpc_whisp.sender = chat_user->GetNickName();\n rpc_whisp.target_name = target_name;\n rpc_whisp.content = content;\n SheduleChatRpc(kernel, rpc_whisp);\n}\n\n\nvoid ChatBackendLogic::HandleSendWorldMsg(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command) {\n ChatUserPtr chat_user = GetChatUser(player);\n if (command.size() <= 1) {\n chat_user->Send(\"command error\");\n return;\n }\n const std::string content = command[1];\n // 世界消息,转给中心服\n ChatRpcWorldMessage rpc_world;\n rpc_world.sender = chat_user->GetNickName();\n rpc_world.content = content;\n SheduleChatRpc(kernel, rpc_world);\n}\n\n\nvoid ChatBackendLogic::HandleListRoom(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command) {\n ChatUserPtr chat_user = GetChatUser(player);\n if (chat_rooms_.empty()) {\n chat_user->Send(\"there have no room in this server\");\n return;\n }\n std::string room_names;\n for (auto& iter : chat_rooms_) {\n room_names += iter.first + \" \";\n }\n chat_user->Send(room_names);\n}\n\n\nChatUserPtr ChatBackendLogic::GetChatUser(IClient* player) {\n auto iter = chat_users_.find(player);\n if (iter != chat_users_.end()) {\n return iter->second;\n }\n return 
nullptr;\n}\n\n\nChatUserPtr ChatBackendLogic::GetChatUser(const std::string& nick_name) {\n for (auto& iter : chat_users_) {\n if (iter.second->GetNickName() == nick_name) {\n return iter.second;\n }\n }\n return nullptr;\n}\n\n\nChatUserPtr ChatBackendLogic::GetChatUser(const client_uuid_t& cuuid) {\n for (auto& iter : chat_users_) {\n if (iter.first->GetClientUuid() == cuuid) {\n return iter.second;\n }\n }\n return nullptr;\n}\n\n\nbool ChatBackendLogic::IsRoomExist(const std::string& room_name) const {\n auto iter = chat_rooms_.find(room_name);\n return iter != chat_rooms_.end();\n}\n\n\nChatRoom* ChatBackendLogic::CreateRoom(const std::string& room_name) {\n if (IsRoomExist(room_name)) { return nullptr; }\n ChatRoom* chat_room = new ChatRoom(room_name);\n chat_rooms_.insert(std::make_pair(room_name,\n std::unique_ptr<ChatRoom>(chat_room)));\n return chat_room;\n}\n\n\nChatRoom* ChatBackendLogic::GetRoom(const std::string& room_name) {\n auto iter = chat_rooms_.find(room_name);\n if (iter != chat_rooms_.end()) {\n return iter->second.get();\n } else {\n return nullptr;\n }\n}\n\n\nint ChatBackendLogic::SheduleChatRpc(IKernel* kernel, const ChatRpc& chat_rpc) {\n const node_id_t chat_center_node = 300;\n VariantList var;\n int ret = chat_rpc.Serialize(var);\n assert(!ret);\n return kernel->ScheduleRpc(chat_center_node, chat_rpc.GetCommand(), var,\n &ChatBackendLogic::OnRpcInvokeReturn, this);\n}\n\n\nvoid ChatBackendLogic::OnRpcInvokeReturn(uint32_t func, \n int result, \n const VariantList& value,\n void* ud) {\n auto self = (ChatBackendLogic*)ud;\n switch (func) {\n case RPC_CHAT_SETNICKNAME:\n self->HandleRpcResponseSetNickName(result, value);\n break;\n case RPC_CHAT_ADDPLAYER:\n case RPC_CHAT_RMPLAYER:\n case RPC_CHAT_WORLDMESSAGE:\n case RPC_CHAT_WHISPERMESSAGE:\n break;\n default:\n assert(0);\n break;\n }\n}\n\n\nvoid ChatBackendLogic::HandleRpcResponseSetNickName(int result,\n const VariantList& value) {\n assert(!result);\n ChatRpcSetNickNameResponse 
response;\n int ret = response.Deserialize(value);\n assert(!ret);\n auto chat_user = GetChatUser(response.cuuid);\n chat_user->SetNickName(response.nick_name);\n char message[512];\n sprintf(message, \"You set nick name to %s\", response.nick_name.c_str());\n chat_user->Send(message);\n}\n"
},
{
"alpha_fraction": 0.5273863077163696,
"alphanum_fraction": 0.549986720085144,
"avg_line_length": 33.6682014465332,
"blob_id": "c4a28596c4f20d4c6fab0a55661d43a25b9d63b8",
"content_id": "68fa6f76274b91e1f23f7a31b043b5a36cf7d498",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7522,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 217,
"path": "/bin/chat_client/variant.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "# Copyright http://www.gokulab.com. All rights reserved.\n\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to\n# deal in the Software without restriction, including without limitation the\n# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n# sell copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n# IN THE SOFTWARE.\n\nimport struct\n\nVT_INT8 = 0 # b\nVT_INT16 = 1 # h\nVT_INT32 = 2 # i\nVT_INT64 = 3 # q\nVT_UINT8 = 4 # B\nVT_UINT16 = 5 # H\nVT_UINT32 = 6 # I\nVT_UINT64 = 7 # Q\nVT_FLOAT = 8 # f\nVT_DOUBLE = 9 # d\nVT_STRING = 11\nVT_BLOCK = 12\nVT_UNKNOWN = 13\n\nclass Variant:\n def __init__(self, vt = VT_UNKNOWN, value = 0):\n self.vt = vt\n self.value = value\n \n def __str__(self):\n return \"vt = \" + str(self.vt) + \", value = \" + str(self.value)\n \n def Serialize(self):\n data = struct.pack(\"<B\", self.vt)\n if VT_STRING == self.vt:\n str_sz = len(self.value)\n data = data + struct.pack(\"<H\", str_sz)\n data = data + self.value\n elif VT_INT8 == self.vt:\n data = data + struct.pack(\"<b\", self.value)\n elif VT_INT16 == self.vt:\n data = data + struct.pack(\"<h\", self.value)\n elif VT_INT32 == self.vt: \n data = data + struct.pack(\"<i\", 
self.value)\n elif VT_INT64 == self.vt:\n data = data + struct.pack(\"<q\", self.value)\n elif VT_UINT8 == self.vt:\n data = data + struct.pack(\"<B\", self.value)\n elif VT_UINT16 == self.vt:\n data = data + struct.pack(\"<H\", self.value)\n elif VT_UINT32 == self.vt:\n data = data + struct.pack(\"<I\", self.value)\n elif VT_UINT64 == self.vt:\n data = data + struct.pack(\"<Q\", self.value)\n elif VT_FLOAT == self.vt:\n data = data + struct.pack(\"<f\", self.value)\n elif VT_DOUBLE == self.vt:\n data = data + struct.pack(\"<d\", self.value)\n elif VT_BLOCK == self.vt:\n block_sz = len(self.value)\n data += struct.pack(\"<H\", block_sz)\n data += self.value\n else:\n raise Exception(\"unknown variant type cannot Serialize\")\n return data\n\n def Deserialize(self, data):\n self.vt, = struct.unpack(\"<B\", data[0])\n if VT_STRING == self.vt:\n str_sz, = struct.unpack(\"<H\", data[1:3])\n self.value = data[3 : 3 + str_sz]\n elif VT_INT8 == self.vt:\n self.value, = struct.unpack(\"<b\", data[1:2])\n elif VT_INT16 == self.vt:\n self.value, = struct.unpack(\"<h\", data[1:3])\n elif VT_INT32 == self.vt: \n self.value, = struct.unpack(\"<i\", data[1:5])\n elif VT_INT64 == self.vt:\n self.value, = struct.unpack(\"<q\", data[1:9])\n elif VT_UINT8 == self.vt:\n self.value, = struct.unpack(\"<B\", data[1:2])\n elif VT_UINT16 == self.vt:\n self.value, = struct.unpack(\"<H\", data[1:3])\n elif VT_UINT32 == self.vt:\n self.value, = struct.unpack(\"<I\", data[1:5])\n elif VT_UINT64 == self.vt:\n self.value, = struct.unpack(\"<Q\", data[1:9])\n elif VT_FLOAT == self.vt:\n self.value, = struct.unpack(\"<f\", data[1:5])\n elif VT_DOUBLE == self.vt:\n self.value, = struct.unpack(\"<d\", data[1:9])\n elif VT_BLOCK == self.vt:\n block_sz, = struct.unpack(\"<H\", data[1:3])\n if block_sz > 0:\n self.value = data[3 : 3 + block_sz]\n else:\n self.value = ''\n else:\n raise Exception(\"unknown variant type cannot Serialize\") \n \n def GetByteSize(self):\n sz = 1; # used for save type 
info\n if self.vt == VT_INT8 or self.vt == VT_UINT8:\n sz += 1\n elif self.vt == VT_INT16 or self.vt == VT_UINT16:\n sz += 2\n elif self.vt == VT_INT32 or self.vt == VT_UINT32 or self.vt == VT_FLOAT:\n sz += 4\n elif self.vt == VT_INT64 or self.vt == VT_UINT64 or self.vt == VT_DOUBLE:\n sz += 8\n elif self.vt == VT_STRING:\n sz += 2 + len(self.value);\n elif self.vt == VT_BLOCK:\n sz += 2 + len(self.value)\n else:\n raise Exception(\"Are you kidding me?!\")\n return sz\n \n \nclass VariantList:\n def __init__(self):\n self.data = []\n \n def __str__(self):\n text = \"data count = %d\" % len(self.data)\n for var in self.data:\n text += \"\\n\" + str(var)\n return text\n \n def Serialize(self):\n data = ''\n for var in self.data:\n data = data + var.Serialize()\n return data\n\n def Deserialize(self, data):\n self.data = []\n sz = len(data)\n offset = 0\n while sz > 0:\n var = Variant()\n var.Deserialize(data[offset:])\n self.data.append(var)\n piece_sz = var.GetByteSize()\n sz -= piece_sz\n offset += piece_sz\n \n def addString(self, value):\n self.data.append(Variant(VT_STRING, value))\n \n def addInt8(self, value):\n self.data.append(Variant(VT_INT8, value)) \n\n def addInt16(self, value):\n self.data.append(Variant(VT_INT16, value))\n \n def addInt32(self, value):\n self.data.append(Variant(VT_INT32, value))\n \n def addInt64(self, value):\n self.data.append(Variant(VT_INT64, value)) \n\n def addUInt8(self, value):\n self.data.append(Variant(VT_UINT8, value)) \n\n def addUInt16(self, value):\n self.data.append(Variant(VT_UINT16, value))\n \n def addUInt32(self, value):\n self.data.append(Variant(VT_UINT32, value))\n \n def addUInt64(self, value):\n self.data.append(Variant(VT_UINT64, value)) \n \n def addFloat(self, value):\n self.data.append(Variant(VT_FLOAT, value))\n \n def addDouble(self, value):\n self.data.append(Variant(VT_DOUBLE, value)) \n \n def addBlock(self, value):\n self.data.append(Variant(VT_BLOCK, value)) \n \n def getValue(self, index):\n return 
self.data[index].value\n\nif __name__ == '__main__':\n varlist = VariantList()\n varlist.addInt8(1)\n varlist.addInt16(2)\n varlist.addInt32(3)\n varlist.addInt64(4)\n varlist.addUInt8(1)\n varlist.addUInt16(2)\n varlist.addUInt32(3)\n varlist.addUInt64(4) \n varlist.addFloat(5.1)\n varlist.addDouble(7.25)\n varlist.addString(\"hello\")\n data = varlist.Serialize()\n varlist2 = VariantList()\n varlist2.Deserialize(data)\n print varlist\n print \"------------------------\"\n print varlist2"
},
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.75,
"avg_line_length": 13.666666984558105,
"blob_id": "00f918c040ca488bbce7e32071003ef7f794c4f0",
"content_id": "9285107416d2d94f8430bffe06c867d4b95987e6",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 44,
"license_type": "permissive",
"max_line_length": 32,
"num_lines": 3,
"path": "/bin/start_chat_center.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "import os\n\nos.system(\"sparrow chat_center\")\n"
},
{
"alpha_fraction": 0.6495348811149597,
"alphanum_fraction": 0.6525581479072571,
"avg_line_length": 30.851852416992188,
"blob_id": "3803fff8a3a9749c6dd1f20a891e5e08da82ab0e",
"content_id": "9749dab64ab9320fd46d49a998a614b20349f4ac",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4300,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 135,
"path": "/src/sparrow/event_emitter.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#include <assert.h>\n\n#include <sparrow/event_emitter.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(EventEmitter)\n\nevent_id_t EventEmitter::AddListener(uint32_t signal, event_cb_t cb, void* ud) {\n assert(signal != RESERVED_EVENT_ID);\n observer_list& observers = event_map_[signal];\n event_observer_t* observer = new event_observer_t(cb, ud);\n observers.push_back(std::unique_ptr<event_observer_t>(observer));\n return observer;\n}\n\n\nvoid EventEmitter::RemoveListener(uint32_t signal, event_id_t id) {\n auto iter = event_map_.find(signal);\n if (iter == event_map_.end()) { return; }\n observer_list& observers = iter->second;\n observer_list::iterator iter_ob = observers.begin();\n while (iter_ob != observers.end()) {\n event_observer_t* observer = iter_ob->get();\n if (observer == (event_observer_t*)id) {\n if (emmiting_event_ == 
signal) {\n (*iter_ob)->removed = true; // delay erase during emitting\n } else {\n observers.erase(iter_ob);\n }\n break;\n }\n ++iter_ob;\n }\n}\n\n\nvoid EventEmitter::RemoveListener(uint32_t signal, event_cb_t cb) {\n auto iter = event_map_.find(signal);\n if (iter == event_map_.end()) { return; }\n observer_list& observers = iter->second;\n observer_list::iterator iter_ob = observers.begin();\n while (iter_ob != observers.end()) {\n event_observer_t* observer = iter_ob->get();\n if (observer->cb == cb) {\n if (emmiting_event_ == signal) {\n (*iter_ob)->removed = true; // delay erase during emitting\n } else {\n observers.erase(iter_ob++);\n continue;\n }\n }\n ++iter_ob;\n }\n}\n\n\nvoid EventEmitter::RemoveListener(uint32_t signal, event_cb_t cb, void* ud) {\n auto iter = event_map_.find(signal);\n if (iter == event_map_.end()) { return; }\n observer_list& observers = iter->second;\n observer_list::iterator iter_ob = observers.begin();\n while (iter_ob != observers.end()) {\n event_observer_t* observer = iter_ob->get();\n if (observer->cb == cb && observer->ud == ud) {\n if (emmiting_event_ == signal) {\n (*iter_ob)->removed = true; // delay erase during emitting\n } else {\n observers.erase(iter_ob++);\n continue;\n }\n }\n ++iter_ob;\n }\n}\n\n\nvoid EventEmitter::RemoveAllListeners(uint32_t signal) {\n do {\n auto iter = event_map_.find(signal);\n if (iter == event_map_.end()) { break; }\n observer_list& observers = iter->second;\n while (!observers.empty()) {\n auto iter = observers.begin();\n if (!(*iter)->removed) {\n RemoveListener(signal, iter->get());\n }\n }\n } while (false);\n}\n\n\nvoid EventEmitter::Emit(uint32_t signal, const VariantList& args) {\n auto iter = event_map_.find(signal);\n if (iter == event_map_.end()) { return; }\n observer_list& observers = iter->second;\n emmiting_event_ = signal;\n for (size_t i = 0; i < observers.size(); ++i) {\n if (!observers[i]->removed) {\n observers[i]->Schedule(signal, args);\n }\n }\n emmiting_event_ = 
RESERVED_EVENT_ID;\n auto ob_iter = observers.begin();\n while (ob_iter != observers.end()) {\n if ((*ob_iter)->removed) {\n observers.erase(ob_iter++);\n } else {\n ++ob_iter;\n }\n }\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6731492280960083,
"alphanum_fraction": 0.6795228123664856,
"avg_line_length": 24.18107032775879,
"blob_id": "ebbe3964fd9122d8258e75c8198d67e8d51ac359",
"content_id": "9ee9282741d767b5fb505ff809b3f4bb1bf9fb0c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6211,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 243,
"path": "/test_modules/chat_module/chat_proto.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef CHAT_PROTO_H\n#define CHAT_PROTO_H\n\n#include <string>\n#include <vector>\n#include <sparrow/proto/base.h>\n\nusing namespace goku;\n\nenum {\n RPC_CHAT_ADDPLAYER = 1,\n RPC_CHAT_RMPLAYER,\n RPC_CHAT_SETNICKNAME,\n RPC_CHAT_WORLDMESSAGE,\n RPC_CHAT_WHISPERMESSAGE,\n};\n\n// client -> logic\n\n#define CC_HELP \"help\"\n#define CC_SETNICK \"setnick\"\n#define CC_CREATEROOM \"create_room\"\n#define CC_LEAVEROOM \"leave_room\"\n#define CC_ROOMMSG \"room_msg\"\n#define CC_WHSPMSG \"whsp_msg\"\n#define CC_WORLDMSG \"world_msg\"\n#define CC_LISTROOM \"list_room\"\n\n\nstruct ChatCommand : public goku::CommonHeader {\n /**\n * available command:\n * help 使用帮助\n * setnick <nick_name> 设置昵称\n * create_room <room_name> 创建/加入房间\n * leave_room <room_name> 离开房间\n * room_msg <room_name> <message> 房间内广播消息\n * whsp_msg 
<nick_name> <message> 私聊消息\n * world_msg <message> 世界消息\n * list_room 列出本服务器上的所有房间\n */\n std::string command_line;\n\nprivate:\n int DoDeserialize(const goku::VariantList& istream) override {\n CommonHeader::DoDeserialize(istream);\n command_line = istream.GetValue<const char*>(GetDeserializeOffet());\n return 0;\n }\n};\n\n\n// logic -> client\nstruct ChatMessage : public goku::CommonHeader {\n std::string message;\n\n ChatMessage() {\n command = 8899;\n }\n\nprivate:\n int DoSerialize(goku::VariantList& ostream) const override {\n goku::CommonHeader::DoSerialize(ostream);\n ostream.AddValue(message);\n return 0;\n }\n};\n\n\n// backend -> RPC\nstruct ChatRpc : public goku::ISerialize {\npublic:\n ChatRpc() { command = -1; }\n ~ChatRpc() {}\n uint32_t GetCommand() const { return command; }\n\nprotected:\n int DoSerialize(VariantList& ostream) const override {\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n return 0;\n }\n\nprotected:\n uint32_t command;\n};\n\n\nstruct ChatRpcAddPlayer : public ChatRpc {\n client_uuid_t cuuid;\n\n ChatRpcAddPlayer() {\n command = RPC_CHAT_ADDPLAYER;\n }\n\nprotected:\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(cuuid.gate_id);\n ostream.AddValue(cuuid.conn_id);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n istream.GetValue(0, cuuid.gate_id);\n istream.GetValue(1, cuuid.conn_id);\n return 0;\n }\n};\n\n\nstruct ChatRpcRemovePlayer : public ChatRpcAddPlayer {\n ChatRpcRemovePlayer() {\n command = RPC_CHAT_RMPLAYER;\n }\n};\n\n\nstruct ChatRpcSetNickName : public ChatRpc {\n client_uuid_t cuuid;\n std::string nick_name;\n\n ChatRpcSetNickName() {\n command = RPC_CHAT_SETNICKNAME;\n }\n\nprotected:\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(cuuid.gate_id);\n ostream.AddValue(cuuid.conn_id);\n ostream.AddValue(nick_name.c_str());\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n 
istream.GetValue(0, cuuid.gate_id);\n istream.GetValue(1, cuuid.conn_id);\n nick_name = istream.GetValue<const char*>(2);\n return 0;\n }\n};\n\n\nstruct ChatRpcWorldMessage : public ChatRpc {\n std::string sender;\n std::string content;\n\n ChatRpcWorldMessage() {\n command = RPC_CHAT_WORLDMESSAGE;\n }\n\nprotected:\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(sender);\n ostream.AddValue(content.c_str());\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n sender = istream.GetValue<const char*>(0);\n content = istream.GetValue<const char*>(1);\n return 0;\n }\n};\n\n\nstruct ChatRpcWhisperMessage : public ChatRpc {\n std::string sender;\n std::string target_name;\n std::string content;\n\n ChatRpcWhisperMessage() {\n command = RPC_CHAT_WHISPERMESSAGE;\n }\n\nprotected:\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(sender.c_str());\n ostream.AddValue(target_name.c_str());\n ostream.AddValue(content.c_str());\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n sender = istream.GetValue<const char*>(0);\n target_name = istream.GetValue<const char*>(1);\n content = istream.GetValue<const char*>(2);\n return 0;\n }\n};\n\n\n// RPC -> backend\nstruct ChatRpcSetNickNameResponse : public ChatRpc {\n client_uuid_t cuuid;\n std::string nick_name;\n uint8_t succeed;\n\n ChatRpcSetNickNameResponse() {\n command = RPC_CHAT_SETNICKNAME;\n }\n\nprotected:\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(cuuid.gate_id);\n ostream.AddValue(cuuid.conn_id);\n ostream.AddValue(nick_name.c_str());\n ostream.AddValue(succeed);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n istream.GetValue(0, cuuid.gate_id);\n istream.GetValue(1, cuuid.conn_id);\n nick_name = istream.GetValue<const char*>(2);\n istream.GetValue(3, succeed);\n return 0;\n }\n};\n\n#endif // CHAT_PROTO_H\n"
},
{
"alpha_fraction": 0.7297297120094299,
"alphanum_fraction": 0.7567567825317383,
"avg_line_length": 11.666666984558105,
"blob_id": "09ba2cad4edd1db42c6be5b4388a81fb15878b86",
"content_id": "238670960ae68f8f028e2311cc382d35d9f174de",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 37,
"license_type": "permissive",
"max_line_length": 26,
"num_lines": 3,
"path": "/bin/start_gate1.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "import os\n\nos.system(\"sparrow gate1\")"
},
{
"alpha_fraction": 0.7297297120094299,
"alphanum_fraction": 0.7567567825317383,
"avg_line_length": 11.666666984558105,
"blob_id": "5641f91582fdf9a1c7752a4a1fc4a8f5c9078302",
"content_id": "4b31f738d5758330d2b6146c8921017dcc7c75c2",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 37,
"license_type": "permissive",
"max_line_length": 26,
"num_lines": 3,
"path": "/bin/start_gate3.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "import os\n\nos.system(\"sparrow gate3\")"
},
{
"alpha_fraction": 0.6666666865348816,
"alphanum_fraction": 0.6666666865348816,
"avg_line_length": 14.5,
"blob_id": "0be0794681dbf7ef995b632733c5b4534aea5d55",
"content_id": "6f07affac52d216d9dae75dfdc1fb68bd0e91bbb",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 30,
"license_type": "permissive",
"max_line_length": 19,
"num_lines": 2,
"path": "/docs/TODO.md",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "* mix backend & RPC\n* LUA bind"
},
{
"alpha_fraction": 0.731721043586731,
"alphanum_fraction": 0.731721043586731,
"avg_line_length": 32.54716873168945,
"blob_id": "dc5ad498b0f7a05d9d68d9cc589d86e69974b841",
"content_id": "0648688061e9aac2ef9e91d74bbb05cf853e7e5e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1780,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 53,
"path": "/test_modules/chat_module/chat_room.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef CHAT_CHAT_ROOM_H\n#define CHAT_CHAT_ROOM_H\n\n#include <string>\n#include <memory>\n#include <set>\n#include <cstdint>\n\n#include \"chat_user.h\"\n\nclass ChatRoom {\npublic:\n ChatRoom(const std::string& room_name);\n ~ChatRoom();\n\n void Send(const std::string& message) {\n for (auto& iter : users_) {\n iter->Send(message);\n }\n }\n\n void AddUser(ChatUserPtr user);\n void RemoveUser(ChatUserPtr user);\n bool IsUserExist(ChatUserPtr user) const;\n size_t GetUserCount() const { return users_.size(); }\n\nprivate:\n const std::string room_name_;\n std::set<ChatUserPtr> users_; // users in this room\n};\n\n#endif // CHAT_CHAT_ROOM_H\n"
},
{
"alpha_fraction": 0.7555642127990723,
"alphanum_fraction": 0.7586880326271057,
"avg_line_length": 33.60810852050781,
"blob_id": "76da0c6354639fe8dcccf99d95c73eb523afaf0d",
"content_id": "7bb05f327f797ec88af844c4b10c821635113439",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2561,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 74,
"path": "/include/sparrow/gate_server.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_GATE_SERVER_H\n#define SPARROW_GATE_SERVER_H\n\n#include <memory>\n#include <string>\n#include <map>\n\n#include \"sparrow_define.h\"\n#include \"entity.h\"\n#include \"i_entity_creator.h\"\n#include \"proto/data_codec.h\"\n#include \"master_client.h\"\n\nNS_SPARROW_BEG\n\nclass TcpServer;\nclass GateClient;\nclass Connector;\n\nclass GateServer : public MasterClient {\npublic:\n DECL_ENTITY_CREATOR(GateServer)\n\n GateServer();\n ~GateServer();\n\nprotected:\n bool OnInit(const VariantList& args) override;\n void OnUninit() override;\n\nprivate:\n static void OnClientConnection(void* ud, connection_id_t connid);\n static void OnClientData(void* ud, connection_id_t connid, const void* data, int32_t sz);\n static void OnClientClose(void* ud, connection_id_t connid);\n static int 
HandleClientMessage(void* ud, uint16_t cmd, const VariantList& args);\n int HandleCommandLogic(GateClient* client, const VariantList& args);\n\n static void OnHarborCommand(void* ud, uint32_t command, const VariantList& args);\n int HandleMasterHarborReady(const VariantList& istream);\n int HandleGateOpenRequest(const VariantList& istream);\n int HandleTransClientData(const VariantList& istream);\n int SendToBackend(const HarborCommand& message);\n \nprivate:\n TcpServer* server_;\n std::string gate_addr_;\n uint16_t gate_port_;\n std::map<connection_id_t, std::unique_ptr<GateClient> > clients_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_GATE_SERVER_H\n"
},
{
"alpha_fraction": 0.702293336391449,
"alphanum_fraction": 0.7052661180496216,
"avg_line_length": 30.6771297454834,
"blob_id": "2f341cec6868c33d838036bca21de2f8c160eea2",
"content_id": "9a9429bebb2fec16543abcd23e59e7d63f8804ef",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 7064,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 223,
"path": "/src/sparrow/gate_server.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <string>\n#include <cstdint>\n\n#include <sparrow/sparrow.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/gate_server.h>\n#include <sparrow/proto/cluster.h>\n#include <sparrow/proto/client.h>\n#include <sparrow/tcp_server.h>\n#include <sparrow/gate_client.h>\n#include <sparrow/connector.h>\n#include <sparrow/sparrow_net.h>\n#include <sparrow/entity_init.h>\n#include <sparrow/event_proto.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(GateServer)\n\n\nGateServer::GateServer() {\n server_ = nullptr;\n}\n\n\nGateServer::~GateServer() {\n}\n\n\nbool GateServer::OnInit(const VariantList& args) {\n GateServerInit cap;\n if (cap.Deserialize(args)) {\n return false;\n }\n if (!MasterClient::OnInit(args)) {\n return false;\n }\n gate_addr_ = cap.gate_addr;\n gate_port_ = cap.gate_port;\n 
AddHarborCommandListener(HBM_MASTERHARBORREADY, &GateServer::OnHarborCommand, this);\n AddHarborCommandListener(HBM_OPENGATE, &GateServer::OnHarborCommand, this);\n AddHarborCommandListener(HBM_TRANSCLIENTDATA, &GateServer::OnHarborCommand, this);\n return true;\n}\n\n\nvoid GateServer::OnUninit() {\n MasterClient::OnUninit();\n if (server_) {\n server_->SetConnectionCallback(nullptr, nullptr);\n server_->Release(); \n }\n RemoveHarborCommandListener(HBM_MASTERHARBORREADY, &GateServer::OnHarborCommand);\n RemoveHarborCommandListener(HBM_OPENGATE, &GateServer::OnHarborCommand);\n RemoveHarborCommandListener(HBM_TRANSCLIENTDATA, &GateServer::OnHarborCommand);\n}\n\n\nvoid GateServer::OnHarborCommand(void* ud, uint32_t command, const VariantList& args) {\n EventHarborCommand e_harbor_command;\n int ret = e_harbor_command.Deserialize(args);\n assert(!ret);\n auto self = (GateServer*)ud;\n switch (command) {\n case HBM_MASTERHARBORREADY:\n self->HandleMasterHarborReady(e_harbor_command.args);\n break;\n case HBM_OPENGATE:\n self->HandleGateOpenRequest(e_harbor_command.args);\n break;\n case HBM_TRANSCLIENTDATA:\n self->HandleTransClientData(e_harbor_command.args);\n break;\n default:\n assert(0);\n break;\n }\n}\n\n\nint GateServer::HandleMasterHarborReady(const VariantList& istream) {\n GateReady gate_ready;\n SendMasterCommand(gate_ready);\n return 0;\n}\n\n\nint GateServer::HandleGateOpenRequest(const VariantList& istream) {\n Trace(TraceLevel::Info, \"Now will open the gate...\");\n Trace(TraceLevel::Info, \"listen at [%s:%d]\", gate_addr_.c_str(), gate_port_);\n TcpServerInit server_cap;\n server_cap.listen_addr = gate_addr_;\n server_cap.listen_port = gate_port_;\n server_ = (TcpServer*)CreateEntity(\"TcpServer\", server_cap);\n if (server_) {\n server_->SetConnectionCallback(&GateServer::OnClientConnection, this);\n }\n OpenGateResponse response(HBM_OPENGATE, server_ ? 
0 : -1);\n SendMasterCommand(response);\n return 0;\n}\n\n\nint GateServer::HandleTransClientData(const VariantList& istream) {\n TransClientData req;\n if (req.Deserialize(istream)) { return -1; }\n assert(GetSelfNodeId() == req.target.gate_id);\n auto iter = clients_.find(req.target.conn_id);\n if (iter == clients_.end()) {\n Trace(TraceLevel::Error, \"client not found\");\n assert(false);\n return -1;\n }\n GateClient* client = iter->second.get();\n IDataCodec* codec = client->GetCodec();\n assert(codec);\n net::SendPacket(req.target.conn_id, req.GetData(), req.GetSize(), codec);\n return 0;\n}\n\n\nint GateServer::SendToBackend(const HarborCommand& message) {\n auto cap = GetSelfNodeCap<GateNodeCap>();\n assert(cap);\n node_id_t backend_node_id = GetNodeIdOfName(cap->backend_server);\n assert(backend_node_id != INVALID_NODE_ID);\n return SendHarborCommand(backend_node_id, message);\n}\n\n\nvoid GateServer::OnClientConnection(void* ud, connection_id_t connid) {\n Trace(TraceLevel::Info, \"receive a client connection\");\n GateServer* self = (GateServer*)ud;\n auto connection_pool = GetConnectionPool();\n net::SetReadCallback(connid, &GateServer::OnClientData, self);\n net::SetCloseCallback(connid, &GateServer::OnClientClose, self);\n GateClient* client = new GateClient(self, connid);\n client->SetMessageHandler(&GateServer::HandleClientMessage, client);\n self->clients_.insert(std::make_pair(connid, \n std::unique_ptr<GateClient>(client)));\n ClientOnline notify;\n notify.client_uuid = client_uuid_t(GetSelfNodeId(), connid);\n self->SendToBackend(notify);\n}\n\n\nvoid GateServer::OnClientData(void* ud, connection_id_t connid,\n const void* data, int32_t sz) {\n GateServer* self = (GateServer*)ud;\n auto iter = self->clients_.find(connid);\n assert(iter != self->clients_.end());\n GateClient* client = iter->second.get();\n if (sz == 0) { return; }\n int result = -1;\n if (sz > 0) {\n result = client->ProcessData(data, sz);\n } \n if (result) {\n 
net::Disconnect(connid);\n }\n}\n\n\nvoid GateServer::OnClientClose(void* ud, connection_id_t connid) {\n Trace(TraceLevel::Info, \"closed connection to client\");\n GateServer* self = (GateServer*)ud;\n auto iter = self->clients_.find(connid);\n assert(iter != self->clients_.end());\n self->clients_.erase(iter);\n ClientOffline notify;\n notify.client_uuid = client_uuid_t(GetSelfNodeId(), connid);\n self->SendToBackend(notify);\n}\n\n\nint GateServer::HandleClientMessage(void* ud,\n uint16_t cmd,\n const VariantList& args) {\n GateClient* client = (GateClient*)ud;\n GateServer* self = client->gate_server();\n switch (cmd) {\n case CMD_AUTH:\n return 0;\n case CMD_LOGIC:\n return self->HandleCommandLogic(client, args);\n default:\n return -1;\n }\n}\n\n\nint GateServer::HandleCommandLogic(GateClient* client,\n const VariantList& args) {\n LogicRequest req;\n if (req.Deserialize(args)) { return -1; }\n client_uuid_t client_uuid(GetSelfNodeId(), client->connection_id());\n ClientLogicMessage notify(client_uuid, req.GetData(), req.GetDataSize());\n SendToBackend(notify);\n return 0;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.7598488926887512,
"alphanum_fraction": 0.7603885531425476,
"avg_line_length": 33.314815521240234,
"blob_id": "2c5d3534971cf6507ad9d8c187e599b34fde3cf7",
"content_id": "12c4b929d5302e6e8d9d00a0a6cefbbe14b964f2",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1853,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 54,
"path": "/include/sparrow/i_entity_creator.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_I_ENTITY_CREATOR_H\n#define SPARROW_I_ENTITY_CREATOR_H\n\n#include \"sparrow_define.h\"\n#include \"i_entity.h\"\n\nNS_SPARROW_BEG\n\nclass IEntityCreator {\npublic:\n virtual ~IEntityCreator() {}\n virtual IEntity* Create() = 0;\n};\n\n\n#define DECL_ENTITY_CREATOR(class_name)\\\n class IEntityCreator##class_name : public IEntityCreator {\\\n public:\\\n IEntity* Create() override { return new class_name; }\\\n };\\\n static IEntityCreator##class_name EntityCreator##class_name;\n\n\n#define IMPL_ENTITY_CREATOR(class_name)\\\n class_name::IEntityCreator##class_name class_name::EntityCreator##class_name;\n\n\n#define ENTITY_CREATOR(class_name) &class_name::EntityCreator##class_name\n\n\nNS_SPARROW_END\n\n#endif // SPARROW_I_ENTITY_CREATOR_H\n"
},
{
"alpha_fraction": 0.7125808000564575,
"alphanum_fraction": 0.7165589332580566,
"avg_line_length": 29.938461303710938,
"blob_id": "74463b16e60d3db5761fcef906aad7a32edcefcb",
"content_id": "18da6df4533f5a7acf2088a80554ea2240e6b584",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2013,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 65,
"path": "/include/sparrow/entity.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_DETAILS_ENTITY_H\n#define SPARROW_DETAILS_ENTITY_H\n\n#include \"i_entity.h\"\n\nNS_SPARROW_BEG\n\nclass Entity : public IEntity {\npublic:\n Entity() {\n ref_count_ = 1;\n entid_ = INVALID_ENTITY_ID;\n }\n\n uint32_t AddRef() override { return ++ref_count_; }\n\n void Release() override {\n --ref_count_;\n if (0 == ref_count_) {\n OnUninit();\n OnDestroy();\n delete this;\n }\n }\n\n uint32_t GetRefCount() const override { return ref_count_; }\n\n entid_t GetEntid() const override { return entid_; }\n\nprotected:\n bool OnCreate(const VariantList& args) override { return true; }\n bool OnInit(const VariantList& args) override { return true; }\n void OnUninit() override {}\n void OnDestroy() override {}\n\nprivate:\n uint32_t ref_count_;\n entid_t entid_;\n friend class 
EntityFactory;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_DETAILS_ENTITY_H\n"
},
{
"alpha_fraction": 0.7109726667404175,
"alphanum_fraction": 0.7127033472061157,
"avg_line_length": 31.100000381469727,
"blob_id": "c83fcc98272d5570f6af63b9c14fca11326118d8",
"content_id": "05989f9ce1c350c92b83906df78a5a4de5d14165",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2889,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 90,
"path": "/include/sparrow/entity_factory.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_ENTITY_FACTORY_H\n#define SPARROW_ENTITY_FACTORY_H\n\n#include <string>\n#include <map>\n#include <vector>\n\n#include \"sparrow_define.h\"\n#include \"i_entity.h\"\n#include \"i_entity_creator.h\"\n#include \"i_serialize.h\"\n#include \"i_entity_factory.h\"\n\nNS_SPARROW_BEG\n\nstruct EntityCreatArgs {\n std::string entity_name;\n VariantList creat_args;\n VariantList init_args;\n};\n\n\n/**\n * when you create an entity from the factory, you hold a reference to it,\n * so when you don't wanna to use it, you should release it.\n * EntityFactory holds every entity create in it,\n * and if EntityFactory is the last object which hold an entity,\n * then entity will be released from memory.\n */\nclass EntityFactory : public IEntityFactory {\npublic:\n EntityFactory();\n ~EntityFactory();\n\n int RegisterCreator(const 
std::string& entity_name,\n IEntityCreator* creator) override {\n auto iter = creators_.find(entity_name);\n if (iter != creators_.end()) { \n assert(0);\n return -1; \n }\n creators_.insert(std::make_pair(entity_name, creator));\n return 0;\n }\n\n int UnregisterCreator(const std::string& entity_name) override {\n auto iter = creators_.find(entity_name);\n if (iter == creators_.end()) { return -1; }\n creators_.erase(iter);\n return 0;\n }\n\n IEntityCreator* GetCreator(const std::string& entity_name) override {\n auto iter = creators_.find(entity_name);\n if (iter == creators_.end()) { return nullptr; }\n return iter->second;\n }\n\n IEntity* Create(const std::string& entity_name,\n const VariantList& creat_args,\n const VariantList& init_args) override;\n\nprivate:\n std::map<std::string, IEntityCreator*> creators_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_ENTITY_FACTORY_H\n"
},
{
"alpha_fraction": 0.6953991651535034,
"alphanum_fraction": 0.7063102126121521,
"avg_line_length": 27.200000762939453,
"blob_id": "c4d2bc5126592fdf8fb2c6026b4e21e868900490",
"content_id": "a6dffb4a4d1d358a8d8306ebf859f7c47d9be122",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5501,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 195,
"path": "/include/sparrow/entity_init.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_ENTITY_INIT_H\n#define SPARROW_ENTITY_INIT_H\n\n#include <string>\n#include <cstdint>\n#include <uv.h>\n\n#include \"sparrow_define.h\"\n#include \"i_serialize.h\"\n#include \"sparrow_net.h\"\n\nNS_SPARROW_BEG\n\nstruct TcpConnectionInit : public ISerialize {\n uv_tcp_t* handle;\n connection_id_t connid;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue<uv_tcp_t*>(handle);\n ostream.AddValue<connection_id_t>(connid);\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n handle = istream.GetValue<uv_tcp_t*>(0);\n connid = istream.GetValue<connection_id_t>(1);\n return 0;\n }\n};\n\n\nstruct TcpServerInit : public ISerialize {\n std::string listen_addr;\n uint16_t listen_port;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream << 
listen_addr.c_str() << listen_port;\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n listen_addr = istream.GetValue<const char*>(0);\n listen_port = istream.GetValue<uint16_t>(1);\n return 0;\n }\n};\n\n\nstruct MasterServerInit :public ISerialize {\n std::string server_addr;\n uint16_t server_port;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(server_addr.c_str());\n ostream.AddValue(server_port);\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n server_addr = istream.GetValue<const char*>(0);\n server_port = istream.GetValue<uint16_t>(1);\n return 0;\n }\n};\n\n\nstruct GateServerInit : public ISerialize {\n std::string gate_addr;\n uint16_t gate_port;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(gate_addr.c_str());\n ostream.AddValue(gate_port);\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n gate_addr = istream.GetValue<const char*>(0);\n gate_port = istream.GetValue<uint16_t>(1);\n return 0;\n }\n};\n\n\nstruct BackendServerInit : public ISerialize {\n std::string module_name;\n std::string init_args;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(module_name.c_str());\n ostream.AddValue(init_args.c_str());\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n module_name = istream.GetValue<const char*>(0);\n init_args = istream.GetValue<const char*>(1);\n return 0;\n }\n};\n\n\nstruct BackendLogicModuleInit : public ISerialize {\n std::string module_name;\n std::string module_init;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(module_name.c_str());\n ostream.AddValue(module_init.c_str());\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n module_name = istream.GetValue<const char*>(0);\n module_init = istream.GetValue<const char*>(1);\n return 0;\n }\n};\n\n\nstruct ConnectorInit : public ISerialize {\n 
std::string peer_addr;\n uint16_t peer_port;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(peer_addr.c_str());\n ostream.AddValue(peer_port);\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n peer_addr = istream.GetValue<const char*>(0);\n peer_port = istream.GetValue<uint16_t>(1);\n return 0;\n }\n};\n\n\nstruct HarborInit : public ISerialize {\n std::string listen_addr;\n uint16_t listen_port;\n node_id_t node_id;\n node_id_t master_node_id;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(listen_addr.c_str());\n ostream.AddValue(listen_port);\n ostream.AddValue(node_id);\n ostream.AddValue(master_node_id);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n listen_addr = istream.GetValue<const char*>(0);\n listen_port = istream.GetValue<uint16_t>(1);\n node_id = istream.GetValue<uint16_t>(2);\n master_node_id = istream.GetValue<node_id_t>(3);\n return 0;\n }\n};\n\n\nstruct UniversalLogicModuleInit : public ISerialize {\n std::string module_name;\n std::string init_args;\n\n int DoSerialize(VariantList& ostream) const override {\n ostream.AddValue(module_name.c_str());\n ostream.AddValue(init_args.c_str());\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n module_name = istream.GetValue<const char*>(0);\n init_args = istream.GetValue<const char*>(1);\n return 0;\n }\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_ENTITY_INIT_H\n"
},
{
"alpha_fraction": 0.675291895866394,
"alphanum_fraction": 0.6775007843971252,
"avg_line_length": 27.549549102783203,
"blob_id": "6b6e9d15d20235e5ccdb14667fa7ea0c07a39f1d",
"content_id": "20d9df5ee7127564df9ae4ffaa76b8a4d3199c3e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3169,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 111,
"path": "/src/sparrow/timer.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#include <assert.h>\n\n#include <sparrow/timer.h>\n#include <sparrow/trace_log.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(Timer)\n\nbool Timer::OnInit(const VariantList& args) {\n return true;\n}\n\n\nvoid Timer::OnUninit() {\n}\n\n\ntimer_id_t Timer::SetTimeOut(uint64_t interval, timer_cb_t cb, void* ud) {\n return CreateTimer(interval, false, cb, ud);\n}\n\n\nvoid Timer::ClearTimeOut(timer_id_t id) {\n ClearTimer(id, false);\n}\n\n\ntimer_id_t Timer::SetInterval(uint64_t interval, timer_cb_t cb, void* ud) {\n return CreateTimer(interval, true, cb, ud);\n}\n\n\nvoid Timer::ClearInterval(timer_id_t id) {\n ClearTimer(id, true);\n}\n\n\nvoid Timer::OnTimer(uv_timer_t* handle) {\n Timer* self = (Timer*)handle->data;\n assert(self);\n do {\n auto iter = self->once_timer_.find(handle);\n if (iter == self->once_timer_.end()) {\n iter = 
self->repeat_timer_.find(handle);\n assert(iter != self->repeat_timer_.end());\n }\n timer_descriptor& desc = iter->second;\n desc.Schedule();\n } while (false);\n self->ClearTimeOut(handle);\n}\n\n\nvoid Timer::OnClose(uv_handle_t* handle) {\n Timer* self = (Timer*)handle->data;\n if (!self) {\n free(handle);\n return;\n }\n}\n\n\ntimer_id_t Timer::CreateTimer(uint64_t interval,\n bool repeat,\n timer_cb_t cb,\n void* ud) {\n uv_timer_t* timer = (uv_timer_t*)malloc(sizeof(*timer));\n uv_timer_init(uv_default_loop(), timer);\n timer->data = this;\n uv_timer_start(timer, &Timer::OnTimer, 0, interval);\n auto& ts = repeat ? repeat_timer_ : once_timer_;\n timer_descriptor desc(cb, ud);\n ts.insert(std::make_pair(timer, desc));\n return timer;\n}\n\n\nvoid Timer::ClearTimer(timer_id_t id, bool repeat) {\n auto& ts = repeat ? repeat_timer_ : once_timer_;\n uv_timer_t* handle = (uv_timer_t*)id;\n auto iter = ts.find(handle);\n if (iter == ts.end()) { return; }\n handle->data = nullptr;\n uv_close((uv_handle_t*)handle, &Timer::OnClose);\n // can't free uv_timer_t here, delay free it in the callback\n ts.erase(iter);\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.7099825143814087,
"alphanum_fraction": 0.7180384993553162,
"avg_line_length": 27.549999237060547,
"blob_id": "387dfbf6c65215b64dde87644e585570d467f151",
"content_id": "f2f245a0baf8fcf79b7db7bd727df3d13426bbd9",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2855,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 100,
"path": "/include/sparrow/event_emitter.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_EVENT_EMITTER_H\n#define SPARROW_EVENT_EMITTER_H\n\n#include <map>\n#include <vector>\n#include <memory>\n#include <cstdint>\n#include <cassert>\n#include <set>\n\n#include \"sparrow_define.h\"\n#include \"entity.h\"\n#include \"entity_factory.h\"\n#include \"i_serialize.h\"\n\nNS_SPARROW_BEG\n\ntypedef void* event_id_t;\ntypedef void(*event_cb_t)(void* ud, uint32_t signal, const VariantList& args);\n\nclass EventEmitter : public Entity {\npublic:\n DECL_ENTITY_CREATOR(EventEmitter)\n\n enum { RESERVED_EVENT_ID = 0xffffffff };\n\n event_id_t AddListener(uint32_t signal, event_cb_t cb, void* ud);\n\n void RemoveListener(uint32_t signal, event_id_t id);\n\n void RemoveListener(uint32_t signal, event_cb_t cb);\n\n void RemoveListener(uint32_t signal, event_cb_t cb, void* ud);\n\n void RemoveAllListeners(uint32_t 
signal);\n\n void Emit(uint32_t signal, const VariantList& args);\n\n void Emit(uint32_t signal, const ISerialize& args) {\n VariantList vars;\n const int ret = args.Serialize(vars);\n assert(!ret);\n Emit(signal, vars);\n }\n\nprotected:\n bool OnInit(const VariantList& args) override {\n emmiting_event_ = RESERVED_EVENT_ID;\n return true;\n }\n\n void OnUninit() override {}\n\nprivate:\n struct event_observer_t {\n event_cb_t cb;\n void* ud;\n bool removed;\n\n event_observer_t(event_cb_t cb, void* ud) {\n this->cb = cb;\n this->ud = ud;\n this->removed = false;\n }\n\n void Schedule(uint32_t signal, const VariantList& args) const {\n if (cb) { cb(ud, signal, args); }\n }\n };\n\nprivate:\n typedef std::vector<std::unique_ptr<event_observer_t> > observer_list;\n std::map<uint32_t, observer_list> event_map_;\n uint32_t emmiting_event_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_EVENT_EMITTER_H\n"
},
{
"alpha_fraction": 0.5238641500473022,
"alphanum_fraction": 0.531030535697937,
"avg_line_length": 36.5107536315918,
"blob_id": "c6171ca5c85061f4dd84f7eaad5a4fbdd2bf9120",
"content_id": "6370c20ce9f2678d268abd5bd1f04a43fb4d1c0c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6977,
"license_type": "permissive",
"max_line_length": 101,
"num_lines": 186,
"path": "/bin/chat_client/chat_client_gui.pyw",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\n# Copyright http://www.gokulab.com. All rights reserved.\n\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to\n# deal in the Software without restriction, including without limitation the\n# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n# sell copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n# IN THE SOFTWARE.\n\n\nimport sys\nimport Queue\nimport struct\nimport asyncore, socket\n\nfrom PyQt4 import QtCore, QtGui, QtNetwork\nfrom ui_chat import Ui_Chat\n\nfrom variant import VariantList\nfrom proto import *\n\nMSG_APPENDMSG = 1\n\nCMD_CHATMESSAGE = 88\n\nclass ChatIO(asyncore.dispatcher):\n def __init__(self, signal):\n asyncore.dispatcher.__init__(self)\n self.send_queue = Queue.Queue(maxsize = 512)\n self.signal = signal\n self.pending_data = ''\n\n def readable(self):\n return True\n\n def writable(self):\n return True\n \n def connnectGate(self, gate_addr, gate_port):\t\n self.create_socket(socket.AF_INET, socket.SOCK_STREAM)\n self.connect((gate_addr, gate_port))\t\n \n def sendChatMessage(self, message):\n self.send_queue.put(str(message))\n\n def handle_connect(self): \n print \"handle_connect\"\n 
self.signal.emit(MSG_APPENDMSG, \"connect to gate server succeed\")\n\n def handle_close(self):\n print \"handle_close\"\n self.signal.emit(MSG_APPENDMSG, \"disconnect to gate server\")\n\n def handle_read(self):\n print \"handle_read\"\n data = self.recv(4096)\n self.pending_data = self.pending_data + data\n data_sz= len(self.pending_data)\n if data_sz == 0:\n self.close()\n return\n while True:\n codec = DataCodecPrefixHeader()\n try:\n (pack_data, pack_sz) = codec.Decode(data)\n var = VariantList()\n var.Deserialize(pack_data)\n cm = ChatMessage()\n cm.Deserialize(var)\n self.signal.emit(MSG_APPENDMSG, str(cm.command) + \" : \" + cm.message)\n data = data[pack_sz:]\n except Exception, e:\n #self.signal.emit(MSG_APPENDMSG, str(e))\n break\n\n def handle_write(self):\n if not self.send_queue.empty():\n text = self.send_queue.get(False, 100)\n self.sendMessage(text)\n \n def handle_error(self):\n print \"handle_error\"\n self.signal.emit(MSG_APPENDMSG, \"handle_error\")\n \n def sendMessage(self, text):\n chat_cmd = ChatCommand()\n chat_cmd.text = text\n logic_req = LogicRequest()\n logic_req.message = chat_cmd\n SendToGate(self, logic_req)\n \nclass LoopThread(QtCore.QThread):\n notify = QtCore.pyqtSignal(int, str)\n\n def __init__(self, parent = None):\n super(LoopThread, self).__init__(parent)\n self.chatio = ChatIO(self.notify)\n \n def run(self):\n asyncore.loop()\n\n\nclass ChatClientWidget(QtGui.QWidget):\n def __init__(self):\n super(ChatClientWidget, self).__init__()\n self.ui = Ui_Chat()\n self.ui.setupUi(self)\n self.closing_wnd = False\n self.ui.lineEdit_gateaddr.setText('127.0.0.1:9005')\n\n\n @QtCore.pyqtSlot()\n def on_button_connect_clicked(self):\n if hasattr(self, \"chathread\"):\n self.appendMessage(\"thread already running...\")\n return\n gate_addr = self.ui.lineEdit_gateaddr.text()\n if len(gate_addr) == 0:\n self.appendMessage(\"format: '127.0.0.1:8000'\")\n return\n arr = gate_addr.split(\":\")\n if len(arr) != 2:\n 
self.appendMessage(\"format: '127.0.0.1:8000'\")\n return\n self.ui.button_connect.setEnabled(False)\n gate_addr = arr[0]\n gate_port = int(arr[1])\n self.chathread = LoopThread()\n self.chathread.notify.connect(self.onNotification)\n self.chathread.finished.connect(self.onThreadFinish)\n self.chathread.chatio.connnectGate(gate_addr, gate_port)\n self.chathread.start()\n\n @QtCore.pyqtSlot()\n def on_button_send_clicked(self):\n message = self.ui.lineEdit_message.text()\n if len(message) == 0:\n QtGui.QMessageBox.information(self, \"Chat\", \"Please input your message\")\n return\n self.chathread.chatio.sendChatMessage(message)\n self.ui.lineEdit_message.setText(\"\")\n\n\n def closeEvent(self, event):\n if not hasattr(self, \"chathread\"):\n return\n if self.chathread.isRunning ():\n self.appendMessage(\"Wait thread finish...\")\n self.closing_wnd = True\n self.chathread.chatio.close()\n event.ignore()\n\n\n def onNotification(self, cmd, msg):\n if cmd == MSG_APPENDMSG:\n self.appendMessage(msg)\n\n\n def onThreadFinish(self):\n delattr(self, \"chathread\")\n if self.closing_wnd:\n self.close()\n\n\n def appendMessage(self, message):\n self.ui.textBrowser_messages.insertPlainText(message + \"\\n\")\t\t\t\n\n\nif __name__ == '__main__':\n app = QtGui.QApplication(sys.argv)\n cc = ChatClientWidget()\n cc.show()\n sys.exit(app.exec_())\n"
},
{
"alpha_fraction": 0.7149046659469604,
"alphanum_fraction": 0.7149046659469604,
"avg_line_length": 35.0625,
"blob_id": "1d4e050b4494e01cedd839fd70d9f227d20418c2",
"content_id": "842a51a4f01fcbbfd5b5730d9fd07b3c10a7e71e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2310,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 64,
"path": "/src/sparrow/backend_logic_module.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <string>\n#include <assert.h>\n\n#include <sparrow/backend_logic_module.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/entity_init.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(BackendLogicModule)\n\nbool BackendLogicModule::OnInit(const VariantList& args) {\n BackendLogicModuleInit init_para;\n if (init_para.Deserialize(args)) { return false; }\n do {\n if (init_para.module_name.empty()) { break; }\n handle_ = LoadModule(init_para.module_name.c_str());\n if (!handle_) { break; }\n create_ = (create_backend_logic_t)GetSymbol(handle_, \"CreateBackendLogic\");\n if (!create_ ) {\n Trace(TraceLevel::Error, \"CreateBackendLogic not implement\");\n break;\n }\n destroy_ = (destroy_backend_logic_t)GetSymbol(handle_, \"DestroyBackendLogic\");\n if (!destroy_) {\n Trace(TraceLevel::Error, \"DestroyBackendLogic 
not implement\");\n break;\n }\n logic_ = create_(init_para.module_init.c_str());\n assert(logic_);\n return logic_ != nullptr;\n } while (false);\n return false;\n}\n\n\nvoid BackendLogicModule::OnUninit() {\n if (logic_) { destroy_(logic_); }\n assert(handle_);\n if (handle_) { UnloadModule(handle_); }\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.5226730108261108,
"alphanum_fraction": 0.5226730108261108,
"avg_line_length": 21,
"blob_id": "a2e3b1cb5d9370724220ef632be70ae23e73b5b8",
"content_id": "3fa9c11ad4d14c3b54404d597ea4dad133ae006a",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 419,
"license_type": "permissive",
"max_line_length": 59,
"num_lines": 19,
"path": "/build/gyp/jsoncpp.gyp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "{\n 'includes': [\n 'define.gypi'\n ],\n 'targets':[\n {\n 'target_name':'jsoncpp',\n 'type':'static_library',\n 'include_dirs':[\n '<(ROOT_DIR)/external/jsoncpp/include'\n ],\n 'sources':[\n '<(ROOT_DIR)/external/jsoncpp/src/json_reader.cpp',\n '<(ROOT_DIR)/external/jsoncpp/src/json_writer.cpp',\n '<(ROOT_DIR)/external/jsoncpp/src/json_value.cpp'\n ]\n }\n ]\n}\n\n"
},
{
"alpha_fraction": 0.7039636373519897,
"alphanum_fraction": 0.7064409852027893,
"avg_line_length": 28.180723190307617,
"blob_id": "eef2da73afbe0e5c0da1a1fef2eb406c2efa0733",
"content_id": "90d578bad7bb59aa3dfca83ea731243afa216350",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2422,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 83,
"path": "/include/sparrow/timer.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_TIMER_H\n#define SPARROW_TIMER_H\n\n#include <uv.h>\n#include <cstdint>\n#include <map>\n\n#include \"sparrow_define.h\"\n#include \"entity.h\"\n#include \"i_entity_creator.h\"\n\nNS_SPARROW_BEG\n\ntypedef void* timer_id_t;\ntypedef void(*timer_cb_t)(void* ud);\n\n\nstruct timer_descriptor {\n timer_descriptor(timer_cb_t cb, void* ud) {\n this->cb = cb;\n this->ud = ud;\n }\n\n void Schedule() const {\n if (cb) { cb(ud); }\n }\n\n timer_cb_t cb;\n void* ud;\n};\n\n\nclass Timer : public Entity {\npublic:\n DECL_ENTITY_CREATOR(Timer)\n\n timer_id_t SetTimeOut(uint64_t interval, timer_cb_t cb, void* ud);\n void ClearTimeOut(timer_id_t id);\n timer_id_t SetInterval(uint64_t interval, timer_cb_t cb, void* ud);\n void ClearInterval(timer_id_t id);\n\nprotected:\n bool OnInit(const VariantList& args) override;\n void 
OnUninit() override;\n\nprivate:\n static void OnTimer(uv_timer_t* handle);\n static void OnClose(uv_handle_t* handle);\n timer_id_t CreateTimer(uint64_t interval,\n bool repeat,\n timer_cb_t cb,\n void* ud);\n void ClearTimer(timer_id_t id, bool repeat);\n\nprivate:\n std::map<uv_timer_t*, timer_descriptor> once_timer_;\n std::map<uv_timer_t*, timer_descriptor> repeat_timer_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_TIMER_H\n"
},
{
"alpha_fraction": 0.6803181171417236,
"alphanum_fraction": 0.6854870915412903,
"avg_line_length": 30.04938316345215,
"blob_id": "7e4aaa05bcb36017b334053686791b662bae81ec",
"content_id": "49d4618f44067adb6f78462bab30c668a21ae821",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2515,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 81,
"path": "/src/sparrow/lua_module.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#include <sparrow/lua_module.h>\n#include <sparrow/trace_log.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(LuaModule)\n\nbool LuaModule::OnInit(const VariantList& args) {\n LuaModuleInit init;\n if (init.Deserialize(args)) { return false; }\n do {\n handle_ = luaL_newstate();\n if (!handle_) { break; }\n luaL_openlibs(handle_);\n if (luaL_loadfile(handle_, init.lua_file.c_str())) {\n Trace(TraceLevel::Error, \"lua load failed: %s\", init.lua_file.c_str());\n break;\n }\n if (lua_pcall(handle_, 0, 0, 0)) {\n Trace(TraceLevel::Error, \"lua parse failed: %s\", init.lua_file.c_str());\n break;\n }\n return true;\n } while (false);\n if (handle_) {\n lua_close(handle_);\n }\n return false;\n}\n\n\nvoid LuaModule::OnUninit() {\n lua_close(handle_);\n}\n\n\nvoid LuaModule::RegisterFunction(const char* name, void(*func)()) {\n}\n\n\nint 
LuaModule::Run() {\n do {\n lua_getglobal(handle_, \"sparrow_main\");\n if (lua_pcall(handle_, 0, 1, 0)) {\n Trace(TraceLevel::Error, \"sparrow_main call failed\");\n break;\n }\n if (!lua_isinteger(handle_, -1)) {\n Trace(TraceLevel::Error, \"sparrow_main return value error, not integer\");\n break;\n }\n const int64_t value = lua_tointeger(handle_, -1);\n lua_pop(handle_, 1);\n Trace(TraceLevel::Info, \"lua execute done, return value = %lld\", value);\n return 0;\n } while (false);\n return -1;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6779661178588867,
"alphanum_fraction": 0.6836158037185669,
"avg_line_length": 16.42622947692871,
"blob_id": "64f8df1ef50edae0e58497ecbcf098becd9dd0ec",
"content_id": "4f1aaf44dbfaa6c336edd1f0e8b8c9a2ef986cc2",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1062,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 61,
"path": "/README.md",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "# sparrow\n\nJust for fun.\n\nYou can view the code of the `develop` branch.\n\n### Requirements\n##### Operate System:\n\t* linux\n\t* windows\n\t* osx\n##### Compiler:\n\tAny C++ compiler which support c++14\n\n### How to build\n\n#### Windows\n\t\nLaunch `visual studio 2013`, then open `build/msvc/build.sln`\n\n#### Linux / OSX\n\n* You should have already installed `gyp`.\n\n\tIf you runs on ubuntu, you can type the command:\n\t\t\n\t\tsudo apt-get install gyp\n\t\n\tOr, if you runs on osx, you can do this by homebrew:\n\t\n\t\tbrew install gyp\n\n* generate makefile/xcodeproj from gyp\n\n\t\tcd build/gyp\n\t\tgyp --depth=. sparrow.gyp\n\n* OK, now you can type the `make` command or open the xcode project file to build.\n \n\n### Depends\n* lua\n\n\tI think you have known what I wanna do with it.\n\n* libuv\n\n\tAn async I/O library, Node.JS is also constructed on the top of it.\n\n* jsoncpp\n\t\n\tI don't like xml, so I will use json if I need save some config file.\n\n\n### Other\n\nAny problems in my developing cycle will be saved in `docs/dev.md`.\n\nHomepage: [www.gokulab.com](http://www.gokulab.com)\n\nE-Mail: vxmker#gmail.com"
},
{
"alpha_fraction": 0.7340129613876343,
"alphanum_fraction": 0.7460611462593079,
"avg_line_length": 21.03061294555664,
"blob_id": "5d7f234c7891f2df62de08a910f118c27641e356",
"content_id": "6b63e06884067b4a21a24062ad85ce78fb793e66",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3918,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 98,
"path": "/docs/dev.md",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "## 简介\n\n### 服务器类型\n##### master\n\t作为中心服务器,所有其它的服务器都会连接到master,可以用来监控服务器集群的状态,以及一些跨服消息转发\n\n##### gate\n\t客户端直接连接到该类服务器,可以配置多个gate服务器。gate服务器与后端的logic服务器建立连接,并直接将客户端数据转发给logic服务器。\n\n##### backend\n\t处理客户端数据的业务逻辑。\n\n##### service\n\t这个类型我在后面新增的,一些服务(比如数据存储服务)只会由内部逻辑服务器来请求使用\n\n### 服务器启动流程\n不同类型的服务器启动有一定的时序要求,比如master服务器一定要首先启动。考虑到会在不同的物理机上来启动不同的服务器。所以我打算先在每台机器上运行一个`backdoor`服务,它的作用就是负责把机器上的一个进程拉起来。\n\n* 启动master\n* master通过backdoor来远程启动各台机器上的服务器进程\n* slave启动完成后向master发送CMD_ONLINEREQUEST\n* master收到所有slave的CMD_ONLINEREQUEST消息后后,向gate发送命令,让其连接对应的后端logic服务器\n* gate与对应的后端logic握手完成后,向master发送CMD_GATEREADYTOOPEN\n* master收到所有gate的CMD\\_GATEREADYTOOPEN消息后,向所有gate发送CMD_GATEOPENREQUEST\n* gate收到CMD_GATEOPENREQUEST后,此时就可处理客户端请求了\n\n### 特性\n* 单线程,异步IO\n* 分布式架构,gate、logic均支持任意多个节点\n* 消息透明压缩、加密(集群内的消息不会进行加密、压缩)\n* 配置简单,`assets\\nodes.json`\n* 易于扩展\n* 支持windows/linux/mac\n* 代码结构清晰,无任何冗余设计\n\n### 拓扑结构\n\n## C++ FAQ\n\n1. 可变参数列表,`std::string`\n\n\t\tvoid Trace(TraceLevel level, const char* format, ...);\n\n2. `uv_tcp_t`是否属于POD类型?\n\n\t\t快速浏览了一下uv代码,在init的时候,会将queue加入到loop,所以拷贝时会有问题。\n\n3. `std::function`的实现\n\n\t在实现`Timer`的时候,考虑回调采用轻量级的C函数,还是采用更加灵活的C++11的`std::function`,以支持任意的参数绑定。\n\n\t`operator()`也是可以继承的,父类实现了的话,子类对象亦可调用。\n\n\t宏居然还能这么玩,第一次见:\n\n\t\t#define FUCK(A, B)\\\n\t\t std::cout << A; // 这里面不能使用参数B\n\t\t\n\t\tint main(int argc, char* argv) {\n\t\t FUCK(2, ) // 逗号还是要加的\n\t\t return 0;\n\t\t}\n\n\n\tstd::function可以绑定C函数,C++成员函数,而且还支持绑定多个形参。\n\n4. libuv中的异步操作\n\n\tlibuv的异步回调都是C函数,只能在handle上绑定一个void*用来储存对应请求上下文的数据。实际中我都是绑定一个对象指针。那么问题是,如何保证回调call过来的时候,这个对象指针还有效(而不是一个野指针)。我脑中浮过如下几个方法:\n\n\t* 对象使用引用计数,发起异步请求时增加引用计数,请求完成时减少引用计数。这种方法又会带来一些不好的设计,对象需要再提供一个明确的接口以能够取消异步请求,否则要是异步请求一直不会完成,就造成了内存泄露。\n\n\n5. 注意`asyncore`的`handle_connect`触发时机啊卧槽\n\n6. 
失误写了如下代码:\n\n\t\tstruct connect_req_t {\n\t\t connect_req_t(const std::string addr, uint16_t port) {\n\t\t memset(this->addr, 0, sizeof(addr)); // over flow\n\t\t assert(addr.size() < sizeof(this->addr));\n\t\t strcpy(this->addr, addr.c_str());\n\t\t this->port = port;\n\t\t }\n\t\t uv_connect_t req;\n\t\t char addr[32];\n\t\t uint16_t port;\n\t\t};\n\n\tmemset的时候内存越界了(后来测试时发现vs2013上sizeof(std::string)为104),蛋疼的是,memset时没报错,最后delete这个结构内存的时候才触发`_CrtIsValidHeapPointer`。\n\t\t\n\n\n## python FAQ\n\n1. struct.unpack ,\n\n2. __str__"
},
{
"alpha_fraction": 0.6049571633338928,
"alphanum_fraction": 0.6083231568336487,
"avg_line_length": 28.709091186523438,
"blob_id": "87a5c35dec2e79524e15472079c0e9ea01f85e67",
"content_id": "a24e7db5d7a380820363a3cf2d72a6ba9c3856f2",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3270,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 110,
"path": "/include/sparrow/utils/string_utils.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_UTILS_STRING_UTILS_H\n#define SPARROW_UTILS_STRING_UTILS_H\n\n#include <string.h>\n#include <string>\n#include <vector>\n#include <cstdarg>\n\n#include \"../sparrow_define.h\"\n\nNS_SPARROW_BEG\n\nclass StringUtils {\npublic:\n static std::string Format(const char* format, ...) {\n char buffer[128];\n size_t sz = sizeof(buffer);\n char* new_buffer = buffer;\n va_list args;\n va_start(args, format);\n while (true) {\n#ifdef _MSC_VER\n#pragma warning(push)\n#pragma warning(disable: 4996)\n#endif // _MSC_VER\n const int result = vsnprintf(new_buffer, sz, format, args);\n#ifdef _MSC_VER\n#pragma warning(pop)\n#endif // _MSC_VER\n if (result >= 0 && result < (int)sz) { break; }\n sz <<= 1;\n new_buffer = (char*)realloc(new_buffer != buffer ? 
new_buffer : nullptr,\n sz);\n }\n const std::string str = new_buffer;\n if (new_buffer != buffer) { free(new_buffer); }\n return str;\n }\n\n\n static void Trim(std::string& str) {\n const char* beg = str.c_str();\n const char* end = str.c_str() + str.length() - 1;\n while (beg <= end) {\n if (*beg == ' ') { \n ++beg; \n } else {\n break;\n }\n }\n while (end > beg) {\n if (*end == ' ') {\n --end;\n } else {\n break;\n }\n }\n std::string result(beg, end + 1);\n if (str != result) { str = result; }\n }\n\n\n static std::vector<std::string> Split(const std::string& str,\n const std::string& sep) {\n std::vector<std::string> result;\n if (sep.empty()) {\n result.push_back(str);\n } else {\n const char* beg = str.c_str();\n const char* end = nullptr;\n while (true) {\n const char* end = strstr(beg, sep.c_str());\n if (end) {\n if (beg != end) { result.push_back(std::string(beg, end)); }\n beg = end + sep.length();\n if (beg == str.c_str() + str.length()) { break; }\n } else {\n result.push_back(std::string(beg));\n break;\n }\n }\n }\n return result;\n }\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_UTILS_STRING_UTILS_H\n"
},
{
"alpha_fraction": 0.7297297120094299,
"alphanum_fraction": 0.7567567825317383,
"avg_line_length": 11.666666984558105,
"blob_id": "21c4ba759016c5dccba4dd9be44bd977b34e8ade",
"content_id": "2821b45327ef580f3cd560d55b1713773b778fb5",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 37,
"license_type": "permissive",
"max_line_length": 26,
"num_lines": 3,
"path": "/bin/start_gate2.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "import os\n\nos.system(\"sparrow gate2\")"
},
{
"alpha_fraction": 0.6967858672142029,
"alphanum_fraction": 0.6982758641242981,
"avg_line_length": 31.288660049438477,
"blob_id": "ffef28b12b96655a632c5e42667a7be61c0a7149",
"content_id": "8666a1dc92aa28ccceeb7e78ca2f09dc47450def",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9398,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 291,
"path": "/src/sparrow/sparrow_application.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <fstream>\n#include <assert.h>\n#include <stdio.h>\n#include <json/reader.h>\n\n#include <sparrow/sparrow_application.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/entity_init.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/tcp_server.h>\n#include <sparrow/tcp_connection.h>\n#include <sparrow/timer.h>\n#include <sparrow/lua_module.h>\n#include <sparrow/sparrow_console.h>\n#include <sparrow/event_emitter.h>\n#include <sparrow/connector.h>\n#include <sparrow/master_server.h>\n#include <sparrow/gate_server.h>\n#include <sparrow/backend_server.h>\n#include <sparrow/backend_logic_module.h>\n#include <sparrow/connection_pool.h>\n#include <sparrow/harbor.h>\n#include <sparrow/harbor_command_sched.h>\n#include <sparrow/kernel.h>\n#include 
<sparrow/crash_dump.h>\n\nNS_SPARROW_BEG\n\nSparrowApplication::SparrowApplication() {\n cluster_codec_.reset(new DataCodecPrefixHead());\n}\n\n\nSparrowApplication::~SparrowApplication() {\n}\n\n\nint SparrowApplication::Initial(int argc, char* argv[]) {\n crash_dump_init();\n LoadAssets();\n ParseCommandLine(argc, argv);\n RegisterCreators();\n CreateFoundationEntities();\n StartServer();\n return 0;\n}\n\n\nvoid SparrowApplication::Uninitial() {\n for (auto iter : entities_) {\n iter.second->Release();\n }\n if (entity_factory_) { delete entity_factory_; }\n // To prevent memory leak, make sure all close_cb will be called.\n uv_run(uv_default_loop(), UV_RUN_DEFAULT);\n}\n\n\nint SparrowApplication::Run() {\n uv_loop_t* loop = uv_default_loop();\n uv_run(loop, UV_RUN_DEFAULT);\n return 0;\n}\n\n\nvoid SparrowApplication::LoadAssets() {\n std::ifstream ifs;\n const std::string assets_dir = GetAssetsDir();\n ifs.open(assets_dir + \"nodes.json\");\n assert(ifs.is_open());\n Json::Reader reader;\n Json::Value root;\n if (!reader.parse(ifs, root, false)) {\n Trace(TraceLevel::Error, \"assets/nodes.json parse failed\");\n }\n Json::Value& master = root[\"master\"];\n auto master_cap = std::make_unique<MasterNodeCap>();\n master_cap->node_id = master[\"node_id\"].asUInt();\n master_cap->node_name = master[\"node_name\"].asString();\n master_cap->harbor_addr = master[\"harbor_addr\"].asString();\n master_cap->harbor_port = master[\"harbor_port\"].asUInt();\n server_caps_.insert(std::make_pair(master_cap->node_id, std::move(master_cap)));\n\n Json::Value& gate = root[\"gate\"];\n Json::Value& nodes = gate[\"nodes\"];\n assert(nodes.isArray());\n for (unsigned int i = 0; i < nodes.size(); ++i) {\n auto gate_cap = std::make_unique<GateNodeCap>();\n gate_cap->node_id = nodes[i][\"node_id\"].asUInt();\n gate_cap->node_name = nodes[i][\"node_name\"].asString();\n gate_cap->harbor_addr = nodes[i][\"harbor_addr\"].asString();\n gate_cap->harbor_port = 
nodes[i][\"harbor_port\"].asUInt();\n gate_cap->listen_addr = nodes[i][\"listen_addr\"].asString();\n gate_cap->listen_port = nodes[i][\"listen_port\"].asUInt();\n gate_cap->backend_server = nodes[i][\"backend_server_name\"].asString();\n server_caps_.insert(std::make_pair(gate_cap->node_id, std::move(gate_cap)));\n }\n\n Json::Value& backend = root[\"backend\"];\n nodes = backend[\"nodes\"];\n assert(nodes.isArray());\n for (unsigned int i = 0; i < nodes.size(); ++i) {\n auto backend_cap = std::make_unique<BackendNodeCap>();\n backend_cap->node_id = nodes[i][\"node_id\"].asUInt();\n backend_cap->node_name = nodes[i][\"node_name\"].asString();\n backend_cap->harbor_addr = nodes[i][\"harbor_addr\"].asString();\n backend_cap->harbor_port = nodes[i][\"harbor_port\"].asUInt();\n backend_cap->entry_module = nodes[i][\"entry_module\"].asString();\n backend_cap->init_args = nodes[i][\"entry_args\"].asString();\n server_caps_.insert(std::make_pair(backend_cap->node_id, std::move(backend_cap)));\n }\n\n // check server id/name conflict\n std::set<node_id_t> node_ids;\n std::set<std::string> server_names;\n for (auto& iter : server_caps_) {\n node_ids.insert(iter.second->node_id);\n server_names.insert(iter.second->node_name);\n }\n if (node_ids.size() != server_caps_.size() \n || server_names.size() != server_caps_.size()) {\n Trace(TraceLevel::Error, \"server id/name conflict, please check 'nodes.json'\");\n assert(0);\n exit(-1);\n }\n}\n\n\nvoid SparrowApplication::ParseCommandLine(int argc, char* argv[]) {\n assert(argc == 2);\n std::string server_name = argv[1];\n node_id_ = GetNodeIdByName(server_name);\n if (node_id_ == INVALID_NODE_ID) {\n Trace(TraceLevel::Error, \"invalid argument, server name don't exist\");\n assert(0);\n exit(-1);\n }\n}\n\n\nvoid SparrowApplication::RegisterCreators() {\n entity_factory_ = new EntityFactory();\n#define REG_CREATOR(entity_class)\\\n entity_factory_->RegisterCreator(#entity_class, ENTITY_CREATOR(entity_class))\n 
REG_CREATOR(Kernel);\n REG_CREATOR(Timer);\n REG_CREATOR(EventEmitter);\n REG_CREATOR(LuaModule);\n REG_CREATOR(TcpServer);\n REG_CREATOR(Connector);\n REG_CREATOR(GateServer);\n REG_CREATOR(MasterServer);\n REG_CREATOR(TcpConnection);\n REG_CREATOR(SparrowConsole);\n REG_CREATOR(BackendServer);\n REG_CREATOR(BackendLogicModule);\n REG_CREATOR(ConnectionPool);\n REG_CREATOR(Harbor);\n REG_CREATOR(HarborCommandSched);\n#undef REG_CREATOR\n}\n\n\nvoid SparrowApplication::CreateFoundationEntities() {\n SetGlobalEntity(GLOBAL_ENT_KERNEL, CreateEntity(\"Kernel\"));\n SetGlobalEntity(GLOBAL_ENT_CONNECTIONPOOL, CreateEntity(\"ConnectionPool\"));\n SetGlobalEntity(GLOBAL_ENT_EVENTEMITTER, CreateEntity(\"EventEmitter\"));\n SetGlobalEntity(GLOBAL_ENT_SPARROWCONSOLE, CreateEntity(\"SparrowConsole\"));\n SetGlobalEntity(GLOBAL_ENT_HARBORCMDSCHED, CreateEntity(\"HarborCommandSched\"));\n\n // harbor\n auto server_cap = GetNodeCap(node_id_);\n HarborInit harbor_init;\n harbor_init.node_id = node_id_;\n harbor_init.listen_addr = server_cap->harbor_addr;\n harbor_init.listen_port = server_cap->harbor_port;\n if (node_id_ == GetMasterCap().node_id) {\n harbor_init.master_node_id = INVALID_NODE_ID;\n } else {\n harbor_init.master_node_id = GetMasterCap().node_id;\n }\n SetGlobalEntity(GLOBAL_ENT_HARBOR, CreateEntity(\"Harbor\", harbor_init));\n\n}\n\n\nvoid SparrowApplication::StartServer() {\n auto iter = server_caps_.find(node_id_);\n assert(iter != server_caps_.end());\n NodeCap* cap = iter->second.get();\n switch (cap->node_type) {\n case NodeType::Master:\n StartMasterServer(dynamic_cast<MasterNodeCap&>(*cap));\n break;\n case NodeType::Gate:\n StartGateServer(dynamic_cast<GateNodeCap&>(*cap));\n break;\n case NodeType::Backend:\n StartBackendServer(dynamic_cast<BackendNodeCap&>(*cap));\n break;\n default:\n assert(0);\n break;\n }\n}\n\n\nvoid SparrowApplication::StartMasterServer(const MasterNodeCap& cap) {\n MasterServerInit init;\n init.server_addr = cap.harbor_addr;\n 
init.server_port = cap.harbor_port;\n IEntity* entity = CreateEntity(\"MasterServer\", init);\n if (!entity) {\n Trace(TraceLevel::Error, \"MasterServer create failed\");\n exit(-1);\n }\n SetGlobalEntity(GLOBAL_ENT_MASTERSERVER, entity);\n}\n\n\nvoid SparrowApplication::StartGateServer(const GateNodeCap& cap) {\n GateServerInit gate_server_cap;\n gate_server_cap.gate_addr = cap.listen_addr;\n gate_server_cap.gate_port = cap.listen_port;\n IEntity* entity = CreateEntity(\"GateServer\", gate_server_cap);\n if (!entity) {\n Trace(TraceLevel::Error, \"GateServer create failed\");\n exit(-1);\n }\n SetGlobalEntity(GLOBAL_ENT_GATESERVER, entity);\n}\n\n\nvoid SparrowApplication::StartBackendServer(const BackendNodeCap& cap) {\n BackendServerInit args;\n args.module_name = cap.entry_module;\n args.init_args = cap.init_args;\n IEntity* entity = CreateEntity(\"BackendServer\", args);\n if (!entity) {\n Trace(TraceLevel::Error, \"BackendServer create failed\");\n exit(-1);\n }\n SetGlobalEntity(GLOBAL_ENT_BACKENDSERVER, entity);\n}\n\n\nnode_id_t SparrowApplication::GetNodeIdByName(const std::string& server_name) {\n for (auto& iter : server_caps_) {\n if (iter.second->node_name == server_name) {\n return iter.second->node_id;\n }\n }\n return INVALID_NODE_ID;\n}\n\n\nconst MasterNodeCap& SparrowApplication::GetMasterCap() {\n static MasterNodeCap* cap = nullptr;\n if (!cap) {\n for (auto& iter : server_caps_) {\n if (iter.second->node_type == NodeType::Master) {\n cap = dynamic_cast<MasterNodeCap*>(iter.second.get());\n break;\n }\n }\n }\n return *cap;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6336134672164917,
"alphanum_fraction": 0.6340936422348022,
"avg_line_length": 35.21739196777344,
"blob_id": "a18fc6dfb19a1a6fe7c246fea7e0fcf0efad713a",
"content_id": "2191f6b1d0718d7e0cd6cfe50214bbee03ab6369",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4265,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 115,
"path": "/test_modules/chat_module/chat_backend_logic.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef CHAT_MODULE_H\n#define CHAT_MODULE_H\n\n#include <vector>\n#include <string>\n#include <map>\n#include <set>\n#include <memory>\n#include <sparrow/i_backend_logic.h>\n\n#include \"chat_proto.h\" \n#include \"chat_room.h\"\n#include \"chat_user.h\"\n\nclass ChatBackendLogic : public IBackendLogic {\npublic:\n ChatBackendLogic();\n ~ChatBackendLogic();\n void OnPlayerEnter(IKernel* kernel, IClient* player) override;\n void OnPlayerExit(IKernel* kernel, IClient* player) override;\n void OnPlayerMessage(IKernel* kernel, IClient* player,\n const void* message, size_t sz) override;\n\nprivate:\n typedef void(ChatBackendLogic::*cmd_handler_t)(IKernel*,\n IClient*,\n const std::vector<std::string>&);\n /** 命令帮助 */\n void HandleHelp(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command);\n\n 
/** 设置昵称 */\n void HandleSetNick(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command);\n\n /** 创建/加入房间 */\n void HandleCreateRoom(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command);\n\n /** 离开房间 */\n void HandleLeaveRoom(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command);\n\n /** 发送聊天消息 */\n void HandleSendRoomMsg(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command);\n\n /** 发送私聊消息 */\n void HandleSendWhspMsg(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command);\n\n /** 发送世界消息 */\n void HandleSendWorldMsg(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command);\n\n /** 列出当前后端服务器上的房间 */\n void HandleListRoom(IKernel* kernel,\n IClient* player,\n const std::vector<std::string>& command);\n\nprivate:\n ChatUserPtr GetChatUser(IClient* player);\n ChatUserPtr GetChatUser(const std::string& nick_name);\n ChatUserPtr GetChatUser(const client_uuid_t& cuuid);\n bool IsRoomExist(const std::string& room_name) const;\n ChatRoom* CreateRoom(const std::string& room_name);\n ChatRoom* GetRoom(const std::string& room_name);\n\n int SheduleChatRpc(IKernel* kernel, const ChatRpc& chat_rpc);\n static void OnRpcInvokeReturn(uint32_t func, \n int result, \n const VariantList& value, \n void* ud);\n\n void HandleRpcResponseSetNickName(int result, const VariantList& value);\n\nprivate:\n typedef std::unique_ptr<ChatRoom> ChatRoomPtr;\n std::map<std::string, ChatRoomPtr> chat_rooms_;\n std::map<IClient*, ChatUserPtr> chat_users_;\n std::map<std::string, cmd_handler_t> handlers_;\n\nprivate:\n IKernel* kernel_;\n};\n\n#endif // CHAT_MODULE_H\n"
},
{
"alpha_fraction": 0.46396124362945557,
"alphanum_fraction": 0.46396124362945557,
"avg_line_length": 30.44761848449707,
"blob_id": "20aedca0fe2a3b882cdc0a3b86b63826e339da1e",
"content_id": "cdb06cee2fdb9017c408a023f3ef14c5cfb64ac3",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3302,
"license_type": "permissive",
"max_line_length": 71,
"num_lines": 105,
"path": "/build/gyp/libuv.gyp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "{\n 'includes': [\n 'define.gypi'\n ],\n 'target_defaults': {\n 'default_configuration': 'Debug',\n 'configurations': {\n 'Debug': {\n },\n 'Release': {\n },\n 'Ad-hoc': {\n },\n 'AppStore': {\n },\n 'Enterprise': {\n },\n },\n },\n 'targets':[\n {\n 'target_name':'uv',\n 'type':'static_library',\n 'include_dirs':[\n '<(ROOT_DIR)/external/libuv/include',\n '<(ROOT_DIR)/external/libuv/src'\n ],\n 'sources':[\n '<(ROOT_DIR)/external/libuv/src/fs-poll.c',\n '<(ROOT_DIR)/external/libuv/src/inet.c',\n '<(ROOT_DIR)/external/libuv/src/threadpool.c',\n '<(ROOT_DIR)/external/libuv/src/uv-common.c',\n '<(ROOT_DIR)/external/libuv/src/version.c',\n '<(ROOT_DIR)/external/libuv/src/unix/core.c', \n '<(ROOT_DIR)/external/libuv/src/unix/poll.c', \n '<(ROOT_DIR)/external/libuv/src/unix/tcp.c',\n '<(ROOT_DIR)/external/libuv/src/unix/fs.c', \n '<(ROOT_DIR)/external/libuv/src/unix/process.c', \n '<(ROOT_DIR)/external/libuv/src/unix/thread.c',\n '<(ROOT_DIR)/external/libuv/src/unix/async.c', \n '<(ROOT_DIR)/external/libuv/src/unix/loop.c', \n '<(ROOT_DIR)/external/libuv/src/unix/proctitle.c', \n '<(ROOT_DIR)/external/libuv/src/unix/timer.c',\n '<(ROOT_DIR)/external/libuv/src/unix/core.c', \n '<(ROOT_DIR)/external/libuv/src/unix/getaddrinfo.c', \n '<(ROOT_DIR)/external/libuv/src/unix/loop-watcher.c', \n '<(ROOT_DIR)/external/libuv/src/unix/tty.c', \n '<(ROOT_DIR)/external/libuv/src/unix/getnameinfo.c', \n '<(ROOT_DIR)/external/libuv/src/unix/signal.c', \n '<(ROOT_DIR)/external/libuv/src/unix/udp.c', \n '<(ROOT_DIR)/external/libuv/src/unix/stream.c',\n '<(ROOT_DIR)/external/libuv/src/unix/dl.c', \n '<(ROOT_DIR)/external/libuv/src/unix/pipe.c',\n ],\n 'conditions':[\n [\n 'OS==\"linux\"',\n {\n 'sources':[ \n '<(ROOT_DIR)/external/libuv/src/unix/linux-core.c',\n '<(ROOT_DIR)/external/libuv/src/unix/linux-inotify.c',\n '<(ROOT_DIR)/external/libuv/src/unix/linux-syscalls.c', \n ]\n }\n ],\n [\n 'OS==\"unix\"',\n {\n 'sources':[\n '<(ROOT_DIR)/external/libuv/src/unix/kqueue.c'\n ]\n 
}\n ],\n [\n 'OS==\"freebsd\"',\n {\n 'sources':[\n '<(ROOT_DIR)/external/libuv/src/unix/freebsd.c'\n ]\n }\n ], \n [\n 'OS==\"android\"',\n {\n 'sources':[\n '<(ROOT_DIR)/external/libuv/src/unix/android-ifaddrs.c',\n '<(ROOT_DIR)/external/libuv/src/unix/pthread-fixes.c'\n ]\n }\n ],\n [\n 'OS==\"mac\"',\n {\n 'sources':[\n '<(ROOT_DIR)/external/libuv/src/unix/fsevents.c',\n '<(ROOT_DIR)/external/libuv/src/unix/darwin.c',\n '<(ROOT_DIR)/external/libuv/src/unix/darwin-proctitle.c',\n '<(ROOT_DIR)/external/libuv/src/unix/kqueue.c'\n ]\n }\n ]\n ]\n }\n ]\n}\n"
},
{
"alpha_fraction": 0.7150886058807373,
"alphanum_fraction": 0.7176826596260071,
"avg_line_length": 32.5217399597168,
"blob_id": "5444706eb24d0c996c93961412eb6c3965dd5f64",
"content_id": "41148adc134ae0309aea9f80446f27685f15e07a",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2315,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 69,
"path": "/include/sparrow/stream_handler.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_NETWORK_STREAM_HANDLER_H\n#define SPARROW_NETWORK_STREAM_HANDLER_H\n\n#include <memory>\n\n#include \"sparrow_console.h\"\n#include \"proto/data_codec.h\"\n#include \"variant.h\"\n\nNS_SPARROW_BEG\n\ntypedef int (*message_handler_t)(void* ud,\n uint16_t cmd,\n const VariantList& args);\n\n\nclass StreamHandler {\npublic:\n StreamHandler();\n virtual ~StreamHandler();\n\n int ProcessData(const void* data, size_t sz);\n \n size_t PendingSize() const { return data_.size(); }\n void GetPendingData(std::vector<char>& data) const { data = data_; }\n\n void SetCodec(IDataCodec* codec) { codec_ = codec; }\n IDataCodec* GetCodec() const { return codec_; }\n\n void SetMessageHandler(message_handler_t handler, void* ud) {\n message_handler_ = handler;\n message_handler_ud_ = ud;\n }\n\nprotected:\n int 
ProcessMessage(uint16_t cmd, const VariantList& args);\n virtual int DoProcessMessage(uint16_t cmd, const VariantList& args);\n\nprivate:\n std::vector<char> data_;\n IDataCodec* codec_;\n message_handler_t message_handler_;\n void* message_handler_ud_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_NETWORK_STREAM_HANDLER_H\n"
},
{
"alpha_fraction": 0.689952552318573,
"alphanum_fraction": 0.689952552318573,
"avg_line_length": 33.6119384765625,
"blob_id": "dea079c3c96854cf0b1ced3f0c3609d9b8ed7854",
"content_id": "be4961a65f027b3ef0f0f8a3d0c4fc7138e81258",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2319,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 67,
"path": "/src/sparrow/entity_factory.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#include <assert.h>\n#include <sparrow/entity_factory.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/entity.h>\n\nNS_SPARROW_BEG\n\nEntityFactory::EntityFactory() {\n}\n\n\nEntityFactory::~EntityFactory() {\n}\n\n\nIEntity* EntityFactory::Create(const std::string& entity_name,\n const VariantList& creat_args,\n const VariantList& init_args) {\n IEntity* entity = nullptr;\n bool created = false; // flag to indicate create succeed, but init failed\n do {\n IEntityCreator* creator = GetCreator(entity_name);\n if (!creator) {\n Trace(TraceLevel::Error, \"creator not found: %s\", entity_name.c_str());\n break; \n }\n entity = creator->Create();\n if (!entity) { break; }\n if (!entity->OnCreate(creat_args)) { \n Trace(TraceLevel::Error, \"OnCreate failed: %s\", entity_name.c_str());\n break; \n }\n created = true;\n if 
(!entity->OnInit(init_args)) { \n Trace(TraceLevel::Error, \"OnInit failed: %s\", entity_name.c_str());\n break; \n }\n entity->AddRef();\n return entity;\n } while (false);\n if (created) { entity->OnDestroy(); }\n if (entity) { delete entity; }\n return nullptr;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.744080126285553,
"alphanum_fraction": 0.744990885257721,
"avg_line_length": 38.92727279663086,
"blob_id": "0614383cf0241034d94c0c3fd4a558731cc8f3dd",
"content_id": "c053f5dd142f891972c2ca41d8d274d1ecbd4fbb",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2198,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 55,
"path": "/test_modules/chat_module/chat_rpc_logic.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef CHAT_SERVICE_LOGIC_H\n#define CHAT_SERVICE_LOGIC_H\n\n#include <set>\n#include <string>\n#include <map>\n#include <memory>\n\n#include <sparrow/i_backend_logic.h>\n\nusing namespace goku;\n\nclass ChatRpcLogic : public goku::IBackendLogic {\npublic:\n int OnRpcRequest(IKernel* kernel,\n uint32_t request,\n const VariantList& args,\n VariantList& result) override;\n\nprivate:\n void Send(IKernel* kernel, client_uuid_t cuuid, const std::string& message);\n void HandleChatRpcAddPlayer(const VariantList& args, VariantList& result);\n void HandleChatRpcRemovePlayer(const VariantList& args, VariantList& result);\n void HandleChatRpcSetNickName(const VariantList& args, VariantList& result);\n void HandleChatRpcWorldMessage(const VariantList& args, VariantList& result);\n void HandleChatRpcWhisperMessage(const 
VariantList& args, VariantList& result);\n\nprivate:\n IKernel* kernel_;\n std::set<client_uuid_t> all_users_;\n std::map<std::string, client_uuid_t> name_to_uuid_;\n};\n\n#endif // CHAT_SERVICE_LOGIC_H\n"
},
{
"alpha_fraction": 0.7062572240829468,
"alphanum_fraction": 0.7071263194084167,
"avg_line_length": 30.099098205566406,
"blob_id": "1426535c929163f7b1ef000500645c5164718993",
"content_id": "86514b25461e156219fd2992c553171d8ebf2425",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3454,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 111,
"path": "/include/sparrow/sparrow_application.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_SPARROW_APPLICATION_H\n#define SPARROW_SPARROW_APPLICATION_H\n\n#include <string>\n#include <vector>\n#include <map>\n#include <set>\n#include <memory>\n#include <unordered_map>\n\n#include \"sparrow_define.h\"\n#include \"server_cap.h\"\n#include \"entity_factory.h\"\n#include \"proto/data_codec.h\"\n\nNS_SPARROW_BEG\n\nclass SparrowApplication {\npublic:\n SparrowApplication();\n ~SparrowApplication();\n\n int Initial(int argc, char* argv[]);\n void Uninitial();\n int Run();\n\n std::string GetAssetsDir() const { return \"../assets/\"; }\n IEntityFactory* GetEntityFactory() const { return entity_factory_; }\n IDataCodec* GetInterClusterCodec() const { return cluster_codec_.get(); }\n node_id_t GetSelfNodeId() const { return node_id_; }\n \n template<typename T>\n T* GetGlobalEntity(const 
std::string& key) {\n auto iter = entities_.find(key);\n if (iter == entities_.end()) { return nullptr; }\n return dynamic_cast<T*>(iter->second);\n }\n\n int SetGlobalEntity(const std::string& key, IEntity* entity) {\n assert(entity);\n auto iter = entities_.find(key);\n if (iter != entities_.end()) {\n assert(0);\n return -1; \n }\n entity->AddRef();\n entities_.insert(std::make_pair(key, entity));\n return 0;\n }\n\n const NodeCap* GetNodeCap(node_id_t node_id) {\n auto iter = server_caps_.find(node_id);\n if (iter != server_caps_.end()) {\n return iter->second.get();\n }\n return nullptr;\n }\n\n void GetNodes(std::vector<const NodeCap*>& nodes) {\n for (auto& iter : server_caps_) {\n nodes.push_back(iter.second.get());\n }\n }\n\nprivate:\n void LoadAssets();\n void ParseCommandLine(int argc, char* argv[]);\n void RegisterCreators();\n void CreateFoundationEntities();\n void StartServer();\n void StartMasterServer(const MasterNodeCap& cap);\n void StartGateServer(const GateNodeCap& cap);\n void StartBackendServer(const BackendNodeCap& cap);\n\n node_id_t GetNodeIdByName(const std::string& server_name);\n const MasterNodeCap& GetMasterCap();\n\nprivate:\n IEntityFactory* entity_factory_;\n std::unique_ptr<IDataCodec> cluster_codec_;\n std::map<std::string, IEntity*> entities_;\n std::map<node_id_t, std::unique_ptr<NodeCap> > server_caps_;\n node_id_t node_id_;\n};\n\nextern goku::SparrowApplication* app;\n\nNS_SPARROW_END\n\n#endif // SPARROW_SPARROW_APPLICATION_H\n"
},
{
"alpha_fraction": 0.7014989256858826,
"alphanum_fraction": 0.7029264569282532,
"avg_line_length": 32.67788314819336,
"blob_id": "5be260b74e64612d2fc4e9a219d914f202c3f959",
"content_id": "de6a27881a96eea2ce2b7f736e42da60ceeae308",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 7007,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 208,
"path": "/src/sparrow/backend_server.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <string>\n#include <cstdint>\n\n#include <sparrow/sparrow.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/backend_server.h>\n#include <sparrow/gate_node.h>\n#include <sparrow/proto/cluster.h>\n#include <sparrow/entity_init.h>\n#include <sparrow/event_proto.h>\n#include <sparrow/backend_logic_module.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(BackendServer)\n\nBackendServer::BackendServer() {\n}\n\n\nBackendServer::~BackendServer() {\n}\n\n\nbool BackendServer::OnInit(const VariantList& args) {\n BackendServerInit cap;\n if (cap.Deserialize(args)) {\n assert(0);\n return false;\n }\n if (!MasterClient::OnInit(args)) {\n return false;\n }\n assert(!cap.module_name.empty());\n if (cap.module_name.empty()) { return false; }\n BackendLogicModuleInit module_init;\n module_init.module_name = 
cap.module_name;\n module_init.module_init = cap.init_args;\n backend_logic_module_ = (BackendLogicModule*)CreateEntity(\"BackendLogicModule\", module_init);\n if (!backend_logic_module_) { return false; }\n AddHarborCommandListener(HBM_CLIENTONLINE, &BackendServer::OnHarborCommand, this);\n AddHarborCommandListener(HBM_CLIENTLOGICMSG, &BackendServer::OnHarborCommand, this);\n AddHarborCommandListener(HBM_CLIENTOFFLINE, &BackendServer::OnHarborCommand, this);\n AddHarborCommandListener(HBM_RPCCALL, &BackendServer::OnHarborCommand, this);\n AddHarborCommandListener(HBM_RPCRET, &BackendServer::OnHarborCommand, this);\n return true;\n}\n\n\nvoid BackendServer::OnUninit() {\n MasterClient::OnUninit();\n if (backend_logic_module_) { backend_logic_module_->Release(); }\n RemoveHarborCommandListener(HBM_CLIENTONLINE, &BackendServer::OnHarborCommand);\n RemoveHarborCommandListener(HBM_CLIENTLOGICMSG, &BackendServer::OnHarborCommand);\n RemoveHarborCommandListener(HBM_CLIENTOFFLINE, &BackendServer::OnHarborCommand);\n RemoveHarborCommandListener(HBM_RPCCALL, &BackendServer::OnHarborCommand);\n RemoveHarborCommandListener(HBM_RPCRET, &BackendServer::OnHarborCommand);\n}\n\n\nint BackendServer::ScheduleRpc(node_id_t node_id,\n uint32_t func,\n const VariantList& args,\n rpc_cb_t cb,\n void* ud) {\n RpcCall rpc_call;\n rpc_call.func = func;\n rpc_call.args = args;\n rpc_call.cookie = ++cookie_;\n auto iter = rpc_cbs_.find(rpc_call.cookie);\n assert(iter == rpc_cbs_.end());\n if (cb) {\n rpc_cbs_.insert(std::make_pair(rpc_call.cookie, std::make_tuple(cb, ud)));\n }\n SendHarborCommand(node_id, rpc_call);\n return -1;\n}\n\n\nvoid BackendServer::OnHarborCommand(void* ud, uint32_t command, const VariantList& args) {\n EventHarborCommand e_harbor_command;\n int ret = e_harbor_command.Deserialize(args);\n assert(!ret);\n auto self = (BackendServer*)ud;\n switch (command) {\n case HBM_CLIENTONLINE:\n self->HandleClientOnline(e_harbor_command.source, e_harbor_command.args);\n 
break;\n case HBM_CLIENTLOGICMSG:\n self->HandleClientMessage(e_harbor_command.source, e_harbor_command.args);\n break;\n case HBM_CLIENTOFFLINE:\n self->HandleClientOffline(e_harbor_command.source, e_harbor_command.args);\n break;\n case HBM_RPCCALL:\n self->HandleRpcCall(e_harbor_command.source, e_harbor_command.args);\n break;\n case HBM_RPCRET:\n self->HandleRpcRet(e_harbor_command.source, e_harbor_command.args);\n break;\n default:\n assert(0);\n break;\n }\n}\n\n\nvoid BackendServer::HandleClientOnline(node_id_t source, const VariantList& args) {\n Trace(TraceLevel::Info, \"HandleClientOnline\");\n ClientOnline notify;\n if (notify.Deserialize(args)) { \n assert(0);\n return;\n }\n GamePlayer* player = new GamePlayer(notify.client_uuid);\n players_.insert(std::make_pair(notify.client_uuid, std::unique_ptr<GamePlayer>(player)));\n auto logic = backend_logic_module_->GetBackendLogic();\n assert(logic);\n logic->OnPlayerEnter(GetKernel(), player);\n}\n\n\nvoid BackendServer::HandleClientOffline(node_id_t source, const VariantList& args) {\n Trace(TraceLevel::Info, \"HandleClientOffline\");\n ClientOffline notify;\n if (notify.Deserialize(args)) {\n assert(0);\n return;\n }\n auto player = players_.find(notify.client_uuid)->second.get();\n assert(player);\n auto logic = backend_logic_module_->GetBackendLogic();\n assert(logic);\n logic->OnPlayerExit(GetKernel(), player);\n}\n\n\nvoid BackendServer::HandleClientMessage(node_id_t source, const VariantList& args) {\n Trace(TraceLevel::Info, \"HandleClientMessage\");\n ClientLogicMessage message;\n if (message.Deserialize(args)) { \n assert(0);\n return;\n }\n auto iter = players_.find(message.source);\n assert(iter != players_.end());\n auto player = iter->second.get();\n auto logic = backend_logic_module_->GetBackendLogic();\n assert(logic);\n logic->OnPlayerMessage(GetKernel(), player, message.GetData(), message.GetSize());\n}\n\n\nvoid BackendServer::HandleRpcCall(node_id_t source, const VariantList& args) {\n 
RpcCall rpc_call;\n int ret = rpc_call.Deserialize(args);\n assert(!ret);\n auto logic = backend_logic_module_->GetBackendLogic();\n assert(logic);\n RpcRet rpc_ret;\n rpc_ret.result = logic->OnRpcRequest(GetKernel(),\n rpc_call.func,\n rpc_call.args,\n rpc_ret.value);\n rpc_ret.cookie = rpc_call.cookie;\n rpc_ret.func = rpc_call.func;\n SendHarborCommand(source, rpc_ret);\n}\n\n\nvoid BackendServer::HandleRpcRet(node_id_t source, const VariantList& args) {\n RpcRet rpc_ret;\n int ret = rpc_ret.Deserialize(args);\n assert(!ret);\n auto iter = rpc_cbs_.find(rpc_ret.cookie);\n if (iter != rpc_cbs_.end()) {\n rpc_cb_t cb;\n void* cb_ud;\n std::tie(cb, cb_ud) = iter->second;\n rpc_cbs_.erase(iter);\n cb(rpc_ret.func, rpc_ret.result, rpc_ret.value, cb_ud);\n } else {\n Trace(TraceLevel::Error, \"RPC cookie not found\");\n }\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.73893803358078,
"alphanum_fraction": 0.7461283206939697,
"avg_line_length": 30.719297409057617,
"blob_id": "f44758f72253984930e8e6ae4214ed89ba1094f5",
"content_id": "c2ea04a953ed05ed4c37c6996670f9270923670e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1808,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 57,
"path": "/include/sparrow/i_entity.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_I_ENTITY_H\n#define SPARROW_I_ENTITY_H\n\n#include <memory>\n#include <cstdint>\n\n#include \"sparrow_define.h\"\n#include \"variant.h\"\n\nNS_SPARROW_BEG\n\ntypedef uintptr_t entid_t;\n\nenum { INVALID_ENTITY_ID = (uintptr_t)-1 };\n\nclass IEntity {\npublic:\n virtual ~IEntity() {}\n\n virtual uint32_t AddRef() = 0;\n virtual void Release() = 0;\n virtual uint32_t GetRefCount() const = 0;\n virtual entid_t GetEntid() const = 0;\n\nprotected:\n virtual bool OnCreate(const VariantList& args) = 0;\n virtual bool OnInit(const VariantList& args) = 0;\n virtual void OnUninit() = 0;\n virtual void OnDestroy() = 0;\n\n friend class EntityFactory;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_I_ENTITY_H\n"
},
{
"alpha_fraction": 0.7176287174224854,
"alphanum_fraction": 0.7215288877487183,
"avg_line_length": 27.808988571166992,
"blob_id": "2688a887122e339c7b89e41463ac01969ba9ed49",
"content_id": "efd6b632caa6955837c2c247404ef77e19b13c08",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2564,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 89,
"path": "/include/sparrow/proto/client.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_PROTO_CLIENT_H\n#define SPARROW_PROTO_CLIENT_H\n\n#include <string>\n#include <vector>\n#include <cstdint>\n\n#include \"base.h\"\n#include \"command_define.h\"\n\nNS_SPARROW_BEG\n\nstruct AuthRequest : public CommonHeader {\n AuthRequest() {\n command = CMD_AUTH;\n }\n\n std::string uname;\n std::string passwd;\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n CommonHeader::DoSerialize(ostream);\n ostream.AddValue(uname);\n ostream.AddValue(passwd);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n CommonHeader::DoDeserialize(istream);\n uname = istream.GetValue<const char*>(1);\n passwd = istream.GetValue<const char*>(2);\n return 0;\n }\n};\n\n\nstruct LogicRequest : public CommonHeader {\n LogicRequest() {\n command = CMD_LOGIC;\n }\n\n std::vector<int8_t> data;\n\n 
size_t GetDataSize() const { return data.size(); }\n const void* GetData() const { return data.empty() ? nullptr : &data[0]; }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n CommonHeader::DoSerialize(ostream);\n ostream.AddValue(data);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n CommonHeader::DoDeserialize(istream);\n block_t block = istream.GetValue<block_t>(1);\n if (block.sz) {\n data.resize(block.sz);\n memcpy(&data[0], block.data, block.sz);\n }\n return 0;\n }\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_PROTO_CLIENT_H\n"
},
{
"alpha_fraction": 0.48296836018562317,
"alphanum_fraction": 0.485401451587677,
"avg_line_length": 23.176469802856445,
"blob_id": "0698e575767b7fd2c7355bd24db691d1eb1a6275",
"content_id": "a655f63f7f870f880d3e3c7c6da68b72b3f8e919",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 822,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 34,
"path": "/build/gyp/sparrow.gyp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "{\n 'includes': [\n 'define.gypi'\n ],\n 'targets':[\n {\n 'target_name':'sparrow',\n 'type':'executable',\n 'include_dirs':[\n \t'<(ROOT_DIR)/external',\n '<(ROOT_DIR)/external/lua/src',\n '<(ROOT_DIR)/external/jsoncpp/include',\n '<(ROOT_DIR)/external/libuv/include',\n '<(ROOT_DIR)/include'\n ],\n 'dependencies': [\n \t'jsoncpp.gyp:jsoncpp',\n \t'libuv.gyp:uv',\n \t'lua.gyp:lua'\n ],\n 'sources':[\n '<!@(find <(ROOT_DIR)/src/sparrow -type f -name \"*.cpp\")'\n ],\n 'sources!':[\n ],\n 'xcode_settings': {\n \t'INFOPLIST_FILE': 'sparrow-info.plist',\n \t'CLANG_CXX_LANGUAGE_STANDARD': 'c++14',\n \t'CLANG_CXX_LIBRARY': 'libc++',\n 'CONFIGURATION_BUILD_DIR': '<(ROOT_DIR)/bin'\n }\n }\n ] # end of targets\n}\n"
},
{
"alpha_fraction": 0.7275189757347107,
"alphanum_fraction": 0.7275189757347107,
"avg_line_length": 27.84375,
"blob_id": "da5c039432c9dcfc32d89f23b52e1fe3b9cdc20b",
"content_id": "6f37ca2e5bda3589afcb7dcef761c15dae4c3bfa",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1846,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 64,
"path": "/include/sparrow/utils/spin_lock.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_SPIN_LOCK_H\n#define SPARROW_SPIN_LOCK_H\n\n#include <atomic>\n#include <assert.h>\n\n#include \"../sparrow_define.h\"\n#include \"noncopyable.h\"\n\nNS_SPARROW_BEG\n\nclass SpinLock : private Noncopyable {\nprivate:\n enum { LOCKED, UNLOCKED };\n\npublic:\n SpinLock() {\n lock_ = UNLOCKED;\n }\n\n // use lowercase to make sure this object can compatible with std::lock_guard\n void lock() {\n assert(lock_ == UNLOCKED);\n while (!trylock()) {}\n }\n\n void unlock() {\n assert(lock_ == LOCKED);\n lock_.store(UNLOCKED);\n }\n\n bool trylock() {\n int expect = UNLOCKED;\n return lock_.compare_exchange_strong(expect, LOCKED);\n }\n\nprivate:\n std::atomic_int lock_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_SPIN_LOCK_H\n"
},
{
"alpha_fraction": 0.6804285645484924,
"alphanum_fraction": 0.6871129274368286,
"avg_line_length": 25.219072341918945,
"blob_id": "05841936c4365e497c0cf3ede40b160867be4ab1",
"content_id": "2a27ef36b7a62b81ee16af17ba8a9cca6c11244f",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 10173,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 388,
"path": "/include/sparrow/proto/cluster.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_PROTO_NODE_H\n#define SPARROW_PROTO_NODE_H\n\n#include <string>\n#include <vector>\n#include <memory>\n#include <assert.h>\n\n#include \"base.h\"\n#include \"command_define.h\"\n\nNS_SPARROW_BEG\n\nstruct HarborSyn : public CommonHeader {\n node_id_t node_id;\n\n HarborSyn() {\n command = CMD_HARBORSYN;\n }\n\n int DoSerialize(VariantList& ostream) const override {\n CommonHeader::DoSerialize(ostream);\n ostream.AddValue(node_id);\n return 0;\n }\n int DoDeserialize(const VariantList& istream) override {\n CommonHeader::DoDeserialize(istream);\n node_id = istream.GetValue<uint16_t>(GetDeserializeOffet());\n return 0;\n }\n};\n\n\nstruct HarborAck : public HarborSyn {\n HarborAck() {\n command = CMD_HARBORACK;\n }\n};\n\n\nstruct HarborMessage : public CommonHeader {\n uint32_t message_type;\n VariantList 
message_data;\n\n HarborMessage() {\n command = CMD_HARBORMESSAGE;\n }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n CommonHeader::DoSerialize(ostream);\n ostream.AddValue(message_type);\n const size_t sz = message_data.GetByteSize();\n ostream.AddValue(sz);\n if (sz) {\n std::unique_ptr<char[]> data(new char[sz]);\n const int ret = message_data.Serialize(data.get(), sz);\n assert(!ret);\n ostream.AddValue(data.get(), sz);\n }\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n CommonHeader::DoDeserialize(istream);\n istream.GetValue(GetDeserializeOffet(), message_type);\n size_t sz = 0;\n istream.GetValue(GetDeserializeOffet() + 1, sz);\n if (sz) {\n block_t block = istream.GetValue<block_t>(GetDeserializeOffet() + 2);\n const int ret = message_data.Deserialize(block.data,block.sz);\n assert(!ret);\n }\n return 0;\n }\n};\n\n// The following message will be serialized into the args memeber of HarborMessage\n\nstruct HarborCommand : public ISerialize {\n HarborCommand() { command = -1; }\n virtual ~HarborCommand() {}\n uint32_t command;\n\nprotected:\n int DoSerialize(VariantList& ostream) const override { return 0; }\n int DoDeserialize(const VariantList& istream) override { return 0; }\n size_t GetDeserializeOffset() const { return 0; }\n};\n\n\nstruct Response : public HarborCommand {\n int32_t res;\n\n Response() {}\n\n Response(uint32_t cmd, int32_t res) {\n this->command = cmd;\n this->res = res;\n }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n HarborCommand::DoSerialize(ostream);\n ostream.AddValue(res);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n HarborCommand::DoDeserialize(istream);\n istream.GetValue(GetDeserializeOffset(), res);\n return 0;\n }\n};\n\n\nstruct MasterHarborReady : public HarborCommand {\n MasterHarborReady() { command = HBM_MASTERHARBORREADY; }\n};\n\n\nstruct GateReady : public HarborCommand {\n GateReady() { command = 
HBM_GATEREADY; }\n};\n\n\nstruct OpenGate : public HarborCommand {\n OpenGate() { command = HBM_OPENGATE; }\n};\n\n\ntypedef Response OpenGateResponse;\n\n\nstruct ClientOnline : public HarborCommand {\n client_uuid_t client_uuid;\n\n ClientOnline() {\n command = HBM_CLIENTONLINE;\n }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n HarborCommand::DoSerialize(ostream);\n ostream.AddValue(client_uuid.gate_id);\n ostream.AddValue(client_uuid.conn_id);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n HarborCommand::DoDeserialize(istream);\n istream.GetValue(GetDeserializeOffset(), client_uuid.gate_id);\n istream.GetValue(GetDeserializeOffset() + 1, client_uuid.conn_id);\n return 0;\n }\n};\n\n\nstruct ClientOffline : public ClientOnline {\n ClientOffline() {\n command = HBM_CLIENTOFFLINE;\n }\n};\n\n\nstruct ClientLogicMessage : public HarborCommand {\n client_uuid_t source;\n std::vector<char> data;\n\n ClientLogicMessage() {\n command = HBM_CLIENTLOGICMSG;\n }\n\n ClientLogicMessage(client_uuid_t source, const void* data, size_t sz) {\n command = HBM_CLIENTLOGICMSG;\n this->source = source;\n if (sz) {\n this->data.resize(sz);\n memcpy(&this->data[0], data, sz);\n }\n }\n\n const void* GetData() const { return data.empty() ? 
nullptr : &data[0]; }\n size_t GetSize() const { return data.size(); }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n HarborCommand::DoSerialize(ostream);\n ostream.AddValue(source.gate_id);\n ostream.AddValue(source.conn_id);\n ostream.AddValue(data);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n HarborCommand::DoDeserialize(istream);\n istream.GetValue(GetDeserializeOffset(), source.gate_id);\n istream.GetValue(GetDeserializeOffset() + 1, source.conn_id);\n block_t block = istream.GetValue<block_t>(GetDeserializeOffset() + 2);\n if (block.sz) {\n data.resize(block.sz);\n memcpy(&data[0], block.data, block.sz);\n }\n return 0;\n }\n};\n\n\nstruct TransClientData : public HarborCommand {\n client_uuid_t target;\n std::vector<char> data;\n\n TransClientData() {\n command = HBM_TRANSCLIENTDATA;\n }\n\n TransClientData(client_uuid_t client_uuid, const void* data, size_t sz) {\n command = HBM_TRANSCLIENTDATA;\n this->target = client_uuid;\n if (sz) {\n this->data.resize(sz);\n memcpy(&this->data[0], data, sz);\n }\n }\n\n size_t GetSize() const { return data.size(); }\n const void* GetData() const { return data.empty() ? 
nullptr : &data[0]; }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n HarborCommand::DoSerialize(ostream);\n ostream.AddValue(target.gate_id);\n ostream.AddValue(target.conn_id);\n ostream.AddValue(data);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n HarborCommand::DoDeserialize(istream);\n istream.GetValue(GetDeserializeOffset(), target.gate_id);\n istream.GetValue(GetDeserializeOffset() + 1, target.conn_id);\n block_t block = istream.GetValue<block_t>(GetDeserializeOffset() + 2);\n if (block.sz) {\n data.resize(block.sz);\n memcpy(&data[0], block.data, block.sz);\n }\n return 0;\n }\n};\n\n\nstruct KickOffClient : public HarborCommand {\n client_uuid_t target;\n\n KickOffClient() {\n command = HBM_KICKOFFCLIENT;\n }\n\n KickOffClient(client_uuid_t client_uuid) {\n command = HBM_KICKOFFCLIENT;\n this->target = client_uuid;\n }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n HarborCommand::DoSerialize(ostream);\n ostream.AddValue(target.gate_id);\n ostream.AddValue(target.conn_id);\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n HarborCommand::DoDeserialize(istream);\n istream.GetValue(GetDeserializeOffset(), target.gate_id);\n istream.GetValue(GetDeserializeOffset() + 1, target.conn_id);\n return 0;\n }\n};\n\n\nstruct RpcCall : public HarborCommand {\n uint32_t cookie;\n uint32_t func;\n VariantList args;\n\n RpcCall() {\n command = HBM_RPCCALL;\n }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n HarborCommand::DoSerialize(ostream);\n ostream.AddValue(cookie);\n ostream.AddValue(func);\n const size_t sz = args.GetByteSize();\n ostream.AddValue(sz);\n if (sz) {\n std::unique_ptr<char[]> data(new char[sz]);\n int ret = args.Serialize(data.get(), sz);\n assert(!ret);\n ostream.AddValue(data.get(), sz);\n }\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n HarborCommand::DoDeserialize(istream);\n 
istream.GetValue(GetDeserializeOffset(), cookie);\n istream.GetValue(GetDeserializeOffset() + 1, func);\n size_t sz = 0;\n istream.GetValue(GetDeserializeOffset() + 2, sz);\n if (sz) {\n block_t block = istream.GetValue<block_t>(GetDeserializeOffset() + 3);\n int ret = args.Deserialize(block.data, block.sz);\n assert(!ret);\n }\n return 0;\n }\n};\n\n\nstruct RpcRet : public HarborCommand {\n uint32_t cookie;\n uint32_t func;\n int32_t result;\n VariantList value;\n\n RpcRet() {\n command = HBM_RPCRET;\n }\n\nprivate:\n int DoSerialize(VariantList& ostream) const override {\n HarborCommand::DoSerialize(ostream);\n ostream.AddValue(cookie);\n ostream.AddValue(func);\n ostream.AddValue(result);\n const size_t sz = value.GetByteSize();\n ostream.AddValue(sz);\n if (sz) {\n std::unique_ptr<char[]> data(new char[sz]);\n const int ret = value.Serialize(data.get(), sz);\n assert(!ret);\n ostream.AddValue(data.get(), sz);\n }\n return 0;\n }\n\n int DoDeserialize(const VariantList& istream) override {\n HarborCommand::DoDeserialize(istream);\n istream.GetValue(GetDeserializeOffset(), cookie);\n istream.GetValue(GetDeserializeOffset() + 1, func);\n istream.GetValue(GetDeserializeOffset() + 2, result);\n size_t sz = 0;\n istream.GetValue(GetDeserializeOffset() + 3, sz);\n if (sz) {\n block_t block = istream.GetValue<block_t>(GetDeserializeOffset() + 4);\n const int ret = value.Deserialize(block.data, block.sz);\n assert(!ret);\n }\n return 0;\n }\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_PROTO_NODE_H\n"
},
{
"alpha_fraction": 0.7435897588729858,
"alphanum_fraction": 0.7435897588729858,
"avg_line_length": 12,
"blob_id": "ab6950f7ce8947558cc26050f8a4c2d0bfef3ead",
"content_id": "0d7c5a4bed4ac2a51b4cfadb52c92f80fdae9c64",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 39,
"license_type": "permissive",
"max_line_length": 27,
"num_lines": 3,
"path": "/bin/start_master.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "import os\n\nos.system(\"sparrow master\")\n"
},
{
"alpha_fraction": 0.6935483813285828,
"alphanum_fraction": 0.6935483813285828,
"avg_line_length": 25.571428298950195,
"blob_id": "91602f88bc5465109914ae3289082488f0f28863",
"content_id": "7475eb36e91a37209938c99cb96efac1272166f5",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3164,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 119,
"path": "/src/sparrow/sparrow_conf.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <sparrow/sparrow_conf.h>\n#include <sparrow/sparrow_application.h>\n\nNS_SPARROW_BEG\n\nnode_id_t GetNodeIdOfName(const std::string& server_name) {\n std::vector<const NodeCap*> servers;\n app->GetNodes(servers);\n for (auto& iter : servers) {\n if (iter->node_name == server_name) {\n return iter->node_id;\n }\n }\n return INVALID_NODE_ID;\n}\n\n\nstd::string GetNodeNameOfId(node_id_t node_id) {\n const NodeCap* cap = GetNodeCap(node_id);\n return cap ? 
cap->node_name.c_str() : nullptr;\n}\n\n\nconst NodeCap* GetNodeCap(node_id_t node_id) {\n return app->GetNodeCap(node_id);\n}\n\n\nNodeType GetNodeType(node_id_t node_id) {\n return GetNodeCap(node_id)->node_type;\n}\n\n\nstd::string GetNodeName(node_id_t node_id) {\n return GetNodeCap(node_id)->node_name;\n}\n\n\nconst MasterNodeCap* GetMasterNodeCap() {\n static const MasterNodeCap* mcap = nullptr;\n if (mcap) { return mcap; }\n std::vector<const NodeCap*> servers;\n app->GetNodes(servers);\n for (auto& iter : servers) {\n if (iter->node_type == NodeType::Master) {\n mcap = dynamic_cast<const MasterNodeCap*>(iter);\n break;\n }\n }\n assert(mcap);\n return mcap;\n}\n\n\nnode_id_t GetMasterNodeId() {\n return GetMasterNodeCap()->node_id;\n}\n\n\nnode_id_t GetSelfNodeId() {\n return app->GetSelfNodeId();\n}\n\n\nconst NodeCap* GetSelfNodeCap() {\n return app->GetNodeCap(GetSelfNodeId());\n}\n\n\nstd::string GetSelfNodeName() {\n return GetSelfNodeCap()->node_name.c_str();\n}\n\n\nvoid GetSlaveNodeIds(std::vector<node_id_t>& ids) {\n std::vector<const NodeCap*> servers;\n app->GetNodes(servers);\n if (!ids.empty()) { ids.clear(); }\n for (auto& iter : servers) {\n if (iter->node_type != NodeType::Master) {\n ids.push_back(iter->node_id);\n }\n }\n}\n\n\nstatic void GetSlaveServer(std::vector<node_id_t>& ids, NodeType cat) {\n std::vector<const NodeCap*> servers;\n app->GetNodes(servers);\n if (!ids.empty()) { ids.clear(); }\n for (auto& iter : servers) {\n if (iter->node_type == cat) {\n ids.push_back(iter->node_id);\n }\n }\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.7292457818984985,
"alphanum_fraction": 0.7292457818984985,
"avg_line_length": 28.253969192504883,
"blob_id": "cf86eb5e980499c798c31b82eb60c5b815618408",
"content_id": "f64e6372a3e7fb92b966bf6b7c1bcaeb9076fc24",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1845,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 63,
"path": "/test_modules/chat_module/chat_user.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef CHAT_CHAT_USER_H\n#define CHAT_CHAT_USER_H\n\n#include <string>\n#include <cstdint>\n#include <memory>\n#include <sparrow/i_backend_logic.h>\n\n#include \"chat_proto.h\"\n\nclass ChatUser {\npublic:\n ChatUser(goku::IClient* game_player);\n ~ChatUser();\n\n void SetNickName(const std::string& nick_name) {\n nick_name_ = nick_name;\n }\n\n std::string GetNickName() const {\n return nick_name_;\n }\n\n void Send(const std::string& message) {\n ChatMessage cm;\n cm.message = message;\n Send(cm);\n }\n\nprivate:\n void Send(const CommonHeader& message);\n\nprivate:\n std::string nick_name_;\n goku::IClient* game_player_;\n};\n\n\ntypedef std::shared_ptr<ChatUser> ChatUserPtr;\n\n\n#endif // CHAT_CHAT_USER_H\n"
},
{
"alpha_fraction": 0.7039248943328857,
"alphanum_fraction": 0.7056313753128052,
"avg_line_length": 36.01052474975586,
"blob_id": "66ec8831a651df8e5f954a9f90ef2fdb3de22ee9",
"content_id": "b7f8b48abe2325dd34fd5c0cb6377406e850b669",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3518,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 95,
"path": "/src/sparrow/master_client.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <assert.h>\n\n#include <sparrow/master_client.h>\n#include <sparrow/proto/cluster.h>\n#include <sparrow/sparrow_errno.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/entity_init.h>\n#include <sparrow/sparrow_application.h>\n#include <sparrow/harbor.h>\n#include <sparrow/event_proto.h>\n#include <sparrow/event_define.h>\n#include <sparrow/sparrow_net.h>\n\nNS_SPARROW_BEG\n\nMasterClient::MasterClient() {\n}\n\n\nMasterClient::~MasterClient() {\n}\n\n\nbool MasterClient::OnInit(const VariantList& args) {\n AddEventListener(EVENT_HARBOR_ONLINE, &MasterClient::OnEventHarborOnline, this);\n AddEventListener(EVENT_HARBOR_OFFLINE, &MasterClient::OnEventHarborOffline, this);\n AddHarborCommandListener(HBM_MASTERHARBORREADY, &MasterClient::HandleMasterHarborReady, this);\n return 
true;\n}\n\n\nvoid MasterClient::OnUninit() {\n RemoveEventListener(EVENT_HARBOR_ONLINE, &MasterClient::OnEventHarborOnline);\n RemoveEventListener(EVENT_HARBOR_OFFLINE, &MasterClient::OnEventHarborOffline);\n RemoveHarborCommandListener(HBM_MASTERHARBORREADY, &MasterClient::HandleMasterHarborReady);\n}\n\n\nvoid MasterClient::OnEventHarborOnline(void* ud,\n uint32_t signal,\n const VariantList& args) {\n auto* self = (MasterClient*)ud;\n EventHarborOnline e_harbor_online;\n int ret = e_harbor_online.Deserialize(args);\n assert(!ret);\n if (ret) { return; }\n const std::string node_name = GetNodeName(e_harbor_online.node_id);\n Trace(TraceLevel::Info, \"harbor node online %s\", node_name.c_str());\n}\n\n\nvoid MasterClient::OnEventHarborOffline(void* ud,\n uint32_t signal,\n const VariantList& args) {\n auto* self = (MasterClient*)ud;\n EventHarborOffline e_harbor_offline;\n int ret = e_harbor_offline.Deserialize(args);\n assert(!ret);\n if (ret) { return; }\n const std::string node_name = GetNodeName(e_harbor_offline.node_id);\n Trace(TraceLevel::Info, \"harbor node offline %s\", node_name.c_str());\n}\n\n\nvoid MasterClient::HandleMasterHarborReady(void* ud,\n uint32_t command,\n const VariantList& args) {\n Harbor* harbor = app->GetGlobalEntity<Harbor>(GLOBAL_ENT_HARBOR);\n assert(harbor);\n harbor->ConnectHarborNet();\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.7055255770683289,
"alphanum_fraction": 0.7075471878051758,
"avg_line_length": 35.64197540283203,
"blob_id": "611307cc61b132d7cb568e6029650fa4ebb42c7a",
"content_id": "1f1b6bf2b33567ecef5c5f35410325d85e74e671",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2968,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 81,
"path": "/include/sparrow/sparrow_define.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_SPARROW_DEFINE_H\n#define SPARROW_SPARROW_DEFINE_H\n\n#include <cstdint>\n\n#ifndef __cplusplus\n#error sparrow depends c++ compiler\n#endif // __cplusplus\n\n\n#define NS_SPARROW_BEG namespace goku {\n#define NS_SPARROW_END }\n\n#define SPARROW_DEBUG 1\n\n\n#define GLOBAL_ENT_CONNECTIONPOOL \"GlobalConnectionPool\"\n#define GLOBAL_ENT_HARBOR \"GlobalHarbor\"\n#define GLOBAL_ENT_EVENTEMITTER \"GlobalEventEmitter\"\n#define GLOBAL_ENT_MASTERSERVER \"GlobalMasterServer\"\n#define GLOBAL_ENT_GATESERVER \"GlobalGateServer\"\n#define GLOBAL_ENT_BACKENDSERVER \"GlobalBackendServer\"\n#define GLOBAL_ENT_RPC \"GlobalRpc\"\n#define GLOBAL_ENT_SPARROWCONSOLE \"GlobalSparrowConsole\"\n#define GLOBAL_ENT_HARBORCMDSCHED \"GlobalHarborMessageSched\"\n#define GLOBAL_ENT_KERNEL \"GlobalKernel\"\n\nNS_SPARROW_BEG\n\ntypedef uint16_t 
node_id_t;\ntypedef int8_t* connection_id_t; // use pointer here just to diff node_id_t\n\n#define INVALID_CONNECTION_ID ((connection_id_t)-1)\n#define INVALID_NODE_ID ((node_id_t)-1)\n\nstruct client_uuid_t {\n node_id_t gate_id; ///< the gate which client connect to\n connection_id_t conn_id; ///< connection id in corr\n\n client_uuid_t(node_id_t gate_id = INVALID_NODE_ID,\n connection_id_t conn_id = INVALID_CONNECTION_ID) {\n this->gate_id = gate_id;\n this->conn_id = conn_id;\n }\n};\n\ninline bool operator==(const client_uuid_t& lhs, const client_uuid_t& rhs) {\n return lhs.gate_id == rhs.gate_id && lhs.conn_id == rhs.conn_id;\n}\n\ninline bool operator<(const client_uuid_t& lhs, const client_uuid_t& rhs) {\n if (lhs.gate_id != rhs.gate_id) {\n return lhs.gate_id < rhs.gate_id;\n }\n return lhs.conn_id < rhs.conn_id;\n}\n\nNS_SPARROW_END\n\n#endif // SPARROW_SPARROW_DEFINE_H\n"
},
{
"alpha_fraction": 0.7130434513092041,
"alphanum_fraction": 0.7347826361656189,
"avg_line_length": 20,
"blob_id": "180b450c74024dbc00777826739abf482e1ac799",
"content_id": "c8abf411792df4556c4147b72280aeab29b370dc",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 400,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 11,
"path": "/docs/uv.md",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "1. `uv_default_loop`的线程安全性\n\n 从代码看来,多个线程同时调用`uv_default_loop`,全局结构可能会初始化多次,导致一些资源(完成端口)泄露。\n\n2. `test`分析:\n\n\t* `test-tcp-close.c`:uv\\_write4次,紧接着uv\\_close,结果write\\_cb在close\\_cb前被回调4次,最后才close\\_cb\n\n\n\n3. 所有的handle全部动态分配,在close_cb中释放"
},
{
"alpha_fraction": 0.7271669507026672,
"alphanum_fraction": 0.7294939160346985,
"avg_line_length": 30.254545211791992,
"blob_id": "825ae130ef3294b0c976cbd1361a2ce82bfa0eef",
"content_id": "f11291538e8f08aa9c943116279dacd56c7f47f1",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3440,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 110,
"path": "/include/sparrow/harbor.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_HARBOR_H\n#define SPARROW_HARBOR_H\n\n#include <map>\n#include <memory>\n#include <set>\n#include <assert.h>\n#include <string>\n#include <vector>\n\n#include \"sparrow_define.h\"\n#include \"entity.h\"\n#include \"i_entity_creator.h\"\n#include \"sparrow_net.h\"\n\nNS_SPARROW_BEG\n\nclass Connector;\nclass TcpServer;\nclass HarborNode;\n\nclass Harbor : public Entity {\npublic:\n DECL_ENTITY_CREATOR(Harbor)\n\n Harbor();\n ~Harbor();\n\n int ConnectHarborNet();\n\n int SendHarborMessage(node_id_t node_id, uint32_t command, VariantList&& args);\n\n uint32_t GetNeighbourNodeCount() const;\n\n bool IsNodeOnline(node_id_t node_id) const;\n \n void GetOnlineNodeIds(std::vector<node_id_t>& nodes) const;\n\nprotected:\n bool OnInit(const VariantList& args) override;\n void OnUninit() 
override;\n\nprivate:\n static void OnHarborConnectIn(void* ud, connection_id_t connid);\n static void OnHarborConnectOut(void* ud, connection_id_t connid);\n static void OnHarborRead(void* ud, connection_id_t, const void* data, int32_t sz);\n static void OnHarborClosed(void* ud, connection_id_t connid);\n static int HandleHarborMessage(void* ud, uint16_t cmd, const VariantList& args);\n\n int HandleHarborSyn(HarborNode* node, const VariantList& args);\n int HandleHarborAck(HarborNode* node, const VariantList& args);\n int HandleHarborMessage(HarborNode* node, const VariantList& args);\n\n int SendHarborCommand(HarborNode* node, const ISerialize& command);\n\n bool ShouldConnectHarbor(node_id_t node_id) const {\n assert(node_id_ != INVALID_NODE_ID);\n assert(node_id != INVALID_NODE_ID);\n return node_id > node_id_;\n }\n\n HarborNode* GetHarbor(connection_id_t connid) const;\n HarborNode* GetHarbor(node_id_t node_id) const;\n\n typedef std::unique_ptr<HarborNode> HarborNodePtr;\n\n std::set<HarborNodePtr>::iterator GetHarborIter(HarborNode* node) {\n std::set<HarborNodePtr>::iterator iter;\n for (iter = harbor_nodes_.begin(); iter != harbor_nodes_.end(); ++iter) {\n if ((*iter).get() == node) {\n break;\n }\n }\n return iter;\n }\n\n int ConnectHarbor(node_id_t node_id);\n\nprivate:\n TcpServer* server_;\n node_id_t node_id_;\n bool is_master_node_;\n std::set<HarborNodePtr> harbor_nodes_;\n std::map<node_id_t, Connector*> harbor_connectors_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_HARBOR_H\n"
},
{
"alpha_fraction": 0.7458064556121826,
"alphanum_fraction": 0.7483870983123779,
"avg_line_length": 30.958763122558594,
"blob_id": "2cbe2bdfd2844d30dae7fde13aa5dec808f2b0c9",
"content_id": "cb6e8b72c427809528d13da9345ad4420c73ec15",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3100,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 97,
"path": "/include/sparrow/sparrow.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_SPARROW_H\n#define SPARROW_SPARROW_H\n\n#include <string>\n#include <vector>\n#include <cstdint>\n#include <cassert>\n\n#include \"sparrow_define.h\"\n#include \"entity_factory.h\"\n#include \"connection_pool.h\"\n#include \"sparrow_conf.h\"\n#include \"event_emitter.h\"\n#include \"event_define.h\"\n#include \"harbor.h\"\n#include \"harbor_command_sched.h\"\n#include \"i_kernel.h\"\n#include \"backend_server.h\"\n\nNS_SPARROW_BEG\n\n// global object\nIEntityFactory* GetEntityFactory();\nConnectionPool* GetConnectionPool();\nIDataCodec* GetInterClusterCodec();\nEventEmitter* GetEventEmitter();\nHarbor* GetHarbor();\nHarborCommandSched* GetHarborCommandSched();\nIKernel* GetKernel();\nBackendServer* GetBackendServer();\n\n\n// entity factory\ninline IEntity* CreateEntity(const std::string& entity_name,\n const 
ISerialize& init_args) {\n VariantList init;\n init_args.Serialize(init);\n auto factory = GetEntityFactory();\n return factory->Create(entity_name, VariantList(), init);\n}\n\ninline IEntity* CreateEntity(const std::string& entity_name) {\n VariantList foo, bar;\n auto factory = GetEntityFactory();\n return factory->Create(entity_name, foo, bar);\n}\n\n\n// global event observer\ninline void AddEventListener(uint32_t singal, event_cb_t cb, void* ud) {\n assert(singal >= EVENT_BEGIN && singal < EVENT_END);\n auto emitter = GetEventEmitter();\n emitter->AddListener(singal, cb, ud);\n}\n\ninline void RemoveEventListener(uint32_t singal, event_cb_t cb) {\n assert(singal >= EVENT_BEGIN && singal < EVENT_END);\n auto emitter = GetEventEmitter();\n emitter->RemoveListener(singal, cb);\n}\n\n\n// harbor command observer\ninline void AddHarborCommandListener(uint32_t singal, event_cb_t cb, void* ud) {\n auto sched = GetHarborCommandSched();\n sched->AddListener(singal, cb, ud);\n}\n\ninline void RemoveHarborCommandListener(uint32_t singal, event_cb_t cb) {\n auto sched = GetHarborCommandSched();\n sched->RemoveListener(singal, cb);\n}\n\nNS_SPARROW_END\n\n#endif // SPARROW_SPARROW_H\n"
},
{
"alpha_fraction": 0.6077795624732971,
"alphanum_fraction": 0.6191247701644897,
"avg_line_length": 33.66292190551758,
"blob_id": "d3660f1d34a2772b93d521acd0283768fef31079",
"content_id": "b2e40a1f2580e6758529544144199f27519c8f06",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3085,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 89,
"path": "/bin/chat_client/chat_client.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\n# Copyright http://www.gokulab.com. All rights reserved.\n\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to\n# deal in the Software without restriction, including without limitation the\n# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n# sell copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n \n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n# IN THE SOFTWARE.\n\nimport socket\nimport select\nimport struct\nimport thread\nimport threading\nimport Queue\n\nclass NetworkThread(threading.Thread):\n def __init__(self, gate_addr, gate_port):\n threading.Thread.__init__(self) \n self.gate_addr = gate_addr\n self.gate_port = gate_port\n self.exit = False\n self.send_queue = Queue.Queue(maxsize = 512) # Queue is thread safe.\n\n\n def run(self):\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n try:\n sock.connect((self.gate_addr, self.gate_port))\n sock.setblocking(0)\n print \"connect to gate ok...\"\n while not self.exit:\n # read first\n readable_socks, _, _ = select.select([sock], [], [], 0.01)\n if len(readable_socks) > 0:\n print \"start read...\"\n stream = sock.recv(512)\n pack_sz, strlen, str = struct.unpack(\"<2I64s\", stream)\n print pack_sz, strlen, str\n # then 
check send queue\n while not self.send_queue.empty():\n sock.send(self.send_queue.get(block = False))\n except Exception, e:\n print e\n sock.close()\n \n\n def stop(self):\n self.exit = True\n\n\n def send(self, data):\n self.send_queue.put(data)\n\n\ndef main():\n gate_addr = '127.0.0.1'\n gate_port = 9001\n handle = NetworkThread(gate_addr, gate_port)\n handle.start()\n while True:\n mess = raw_input(\"Enter your input: \");\n if len(mess) == 0 or len(mess) == 64:\n continue\n if mess == \"quit\":\n break\n pack_sz = 2 + 2 + len(mess)\n stream = struct.pack(\"<2I64s\", pack_sz, len(mess), mess)\n #pack_sz, strlen, str = struct.unpack(\"<2I64s\", stream)\n #print pack_sz, strlen, str\n handle.send(stream)\n handle.join()\n\n \nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.7657342553138733,
"alphanum_fraction": 0.7664335370063782,
"avg_line_length": 32.25581359863281,
"blob_id": "9d459e0f814684a05d7a2f26933888a36c3022ba",
"content_id": "2510b19c10ca2ac8cac3af2c654939eedb8e9d5b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1430,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 43,
"path": "/include/sparrow/sparrow_errno.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_SPARROW_ERRNO_H\n#define SPARROW_SPARROW_ERRNO_H\n\n#include \"sparrow_define.h\"\n\nNS_SPARROW_BEG\n\nenum {\n ENOTFOUND = 1,\n EINVALIDARG,\n EWAITDATA,\n EBUFOVERFLOW,\n ENOTHANDLE,\n ERPCNOIMPL,\n EMAXERRNO\n};\n\nconst char* GetErrorMessage(int err);\n\nNS_SPARROW_END\n\n#endif // SPARROW_SPARROW_ERRNO_H\n"
},
{
"alpha_fraction": 0.6009653806686401,
"alphanum_fraction": 0.6084070801734924,
"avg_line_length": 25.875675201416016,
"blob_id": "f1efd57ab4cbb40c86df4f4b13c6dab8abbd1778",
"content_id": "56ef576ae5ad5f9449fbe06d1d503283890280a1",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4974,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 185,
"path": "/include/sparrow/proto/data_codec.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_PROTO_WIRE_STREAM_H\n#define SPARROW_PROTO_WIRE_STREAM_H\n\n#include <cstdint>\n#include <assert.h>\n#include <limits.h>\n#include <string.h>\n#include <vector>\n#include <memory>\n\n#include \"../sparrow_define.h\"\n#include \"../sparrow_errno.h\"\n\nNS_SPARROW_BEG\n\nclass IDataCodec {\npublic:\n virtual ~IDataCodec() {}\n\n int Encode(const void* idata, size_t isz,\n void* odata, size_t& osz) {\n return DoEncode(idata, isz, odata, osz);\n }\n\n int Decode(const void* idata, size_t isz,\n void* odata, size_t& osz,\n size_t& consumed_sz) {\n return DoDecode(idata, isz, odata, osz, consumed_sz);\n }\n \nprotected:\n virtual int DoEncode(const void* idata, size_t isz,\n void* odata, size_t& osz) = 0;\n\n virtual int DoDecode(const void* idata, size_t isz,\n void* odata, size_t& osz,\n size_t& 
consumed_sz) = 0;\n};\n\n\nclass DataCodecPrefixHead : public IDataCodec {\npublic:\n int DoEncode(const void* idata, size_t isz,\n void* odata, size_t& osz) override {\n assert(isz + 2 <= USHRT_MAX);\n if (osz < isz + 2) {\n osz = isz + 2;\n return EBUFOVERFLOW;\n }\n *(uint16_t*)odata = (uint16_t)isz + 2;\n memcpy((char*)odata + 2, idata, isz);\n osz = isz + 2;\n return 0;\n }\n\n int DoDecode(const void* idata, size_t isz,\n void* odata, size_t& osz,\n size_t& consumed_sz) override {\n if (isz < 2) { return EWAITDATA; }\n uint16_t pack_sz = *(uint16_t*)idata;\n if (isz < pack_sz) { return EWAITDATA; }\n if ((uint16_t)osz < pack_sz - 2) {\n osz = pack_sz - 2;\n return EBUFOVERFLOW;\n }\n memcpy(odata, (char*)idata + 2, pack_sz - 2);\n osz = pack_sz - 2;\n consumed_sz = pack_sz;\n return 0;\n }\n};\n\n\nclass DataCodecCompress : public IDataCodec {\npublic:\n int DoEncode(const void* idata, size_t isz,\n void* odata, size_t& osz) override {\n if (osz < isz) {\n osz = isz;\n return -1;\n }\n memcpy(odata, idata, isz);\n osz = isz;\n return 0;\n }\n\n int DoDecode(const void* idata, size_t isz,\n void* odata, size_t& osz,\n size_t& consumed_sz) {\n if (osz < isz) {\n osz = isz;\n return -1;\n }\n memcpy(odata, idata, isz);\n osz = isz;\n consumed_sz = isz;\n return 0;\n }\n};\n\n\nclass DataCodecEncrypt : public IDataCodec {\npublic:\n int DoEncode(const void* idata, size_t isz,\n void* odata, size_t& osz) override {\n if (osz < isz) {\n osz = isz;\n return -1;\n }\n memcpy(odata, idata, isz);\n osz = isz;\n return 0;\n }\n\n int DoDecode(const void* idata, size_t isz,\n void* odata, size_t& osz,\n size_t& consumed_sz) {\n if (osz < isz) {\n osz = isz;\n return -1;\n }\n memcpy(odata, idata, isz);\n osz = isz;\n consumed_sz = isz;\n return 0;\n }\n};\n\n\nclass DataCodecCompose : public IDataCodec {\npublic:\n DataCodecCompose() {\n composes_.push_back(std::unique_ptr<IDataCodec>(new DataCodecPrefixHead()));\n }\n\n ~DataCodecCompose() {\n }\n\nprotected:\n int 
DoEncode(const void* idata, size_t isz,\n void* odata, size_t& osz) override {\n for (auto iter = composes_.rbegin(); iter != composes_.rend(); ++iter) {\n if ((*iter)->Encode(idata, isz, odata, osz)) { return -1; }\n }\n return 0;\n }\n\n int DoDecode(const void* idata, size_t isz,\n void* odata, size_t& osz,\n size_t& consumed_sz) override {\n for (auto iter = composes_.begin(); iter != composes_.end(); ++iter) {\n int result = (*iter)->Decode(idata, isz, odata, osz, consumed_sz);\n if (result) { return result; }\n }\n return 0;\n }\n\nprivate:\n std::vector<std::unique_ptr<IDataCodec> > composes_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_PROTO_WIRE_STREAM_H\n"
},
{
"alpha_fraction": 0.6428571343421936,
"alphanum_fraction": 0.7571428418159485,
"avg_line_length": 16.5,
"blob_id": "30c6bb7dd066cdcc33a44cdc29c5dd1d8b3580bf",
"content_id": "defe08ff1614e9dfa69f7c0ca2deb1190661b1fe",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Lua",
"length_bytes": 70,
"license_type": "permissive",
"max_line_length": 24,
"num_lines": 4,
"path": "/bin/tests.lua",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "function sparrow_main()\n\tprint(\"hello from LUA\")\n\treturn 20150620\nend\n"
},
{
"alpha_fraction": 0.3199999928474426,
"alphanum_fraction": 0.3199999928474426,
"avg_line_length": 9,
"blob_id": "786a809d20258a3dd0ec5d96bf2659ebcae39994",
"content_id": "f3b29a43b69f7b6720dde84c3687e8b470a01a4b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 50,
"license_type": "permissive",
"max_line_length": 23,
"num_lines": 5,
"path": "/build/gyp/define.gypi",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "{\n 'variables' : {\n 'ROOT_DIR': '../..'\n }\n}\n"
},
{
"alpha_fraction": 0.7068623304367065,
"alphanum_fraction": 0.7084490060806274,
"avg_line_length": 34.01388931274414,
"blob_id": "d52e20f6b04997579f7fa158cc55aabbdacb5afa",
"content_id": "2cfa945105baeba5bf15649b1db73a2cc9f336d2",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2523,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 72,
"path": "/include/sparrow/sparrow_net.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_SPARROW_NET_H\n#define SPARROW_SPARROW_NET_H\n\n#include <string>\n#include <cstdint>\n\n#include \"sparrow_define.h\"\n#include \"i_serialize.h\"\n#include \"proto/data_codec.h\"\n#include \"backend_server.h\"\n\nNS_SPARROW_BEG\n\nstruct HarborCommand;\n\n// socket IO (internal used only)\nnamespace net {\n // put these lower-level I/O API into net namespace to avoid error use\n typedef void(*read_cb_t)(void*, connection_id_t, const void*, int32_t);\n typedef void(*close_cb_t)(void*, connection_id_t);\n\n int SetReadCallback(connection_id_t connid, read_cb_t cb, void* ud);\n \n int SetCloseCallback(connection_id_t connid, close_cb_t cb, void* ud);\n \n int SendPacket(connection_id_t connid,\n const void* data,\n size_t sz,\n IDataCodec* codec);\n\n int SendPacket(connection_id_t connid, \n const 
ISerialize& stream,\n IDataCodec* codec);\n \n int Disconnect(connection_id_t connid);\n}\n\n// harbor IO API\nint SendHarborCommand(node_id_t node_id, const HarborCommand& message);\nint SendMasterCommand(const HarborCommand& message);\nint SendClientCommand(client_uuid_t cuuid, const void* data, size_t sz);\n\n// RPC\nint ScheduleRpc(node_id_t node_id,\n uint32_t func,\n const VariantList& args,\n rpc_cb_t cb, void* ud);\n\nNS_SPARROW_END\n\n#endif // SPARROW_SPARROW_NET_H\n"
},
{
"alpha_fraction": 0.7024696469306946,
"alphanum_fraction": 0.7048216462135315,
"avg_line_length": 23.766990661621094,
"blob_id": "3b46f42c7df2e7db661f13e1a3cff9f8c07c0fe1",
"content_id": "11a713de9fdce83d7208d07dc4a0f9e8a27917a5",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2553,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 103,
"path": "/include/sparrow/server_cap.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_SERVER_CONFIG_H\n#define SPARROW_SERVER_CONFIG_H\n\n#include \"sparrow_define.h\"\n#include \"sparrow_net.h\"\n\nNS_SPARROW_BEG\n\n#include <cstdint>\n#include <string>\n#include <assert.h>\n\nenum class NodeType {\n Master, ///< master server\n Gate, ///< gateway server\n Backend, ///< backend server\n Unknown\n};\n\n\ninline const char* GetNodeTypeString(NodeType type) {\n switch (type) {\n case NodeType::Master:\n return \"master\";\n case NodeType::Gate:\n return \"gate\";\n case NodeType::Backend:\n return \"backend\";\n case NodeType::Unknown:\n return \"unknown\";\n default:\n assert(0);\n return \"\";\n }\n}\n\n\nstruct NodeCap {\n NodeCap() { \n node_type = NodeType::Unknown;\n node_id = INVALID_NODE_ID;\n harbor_port = 0;\n }\n virtual ~NodeCap() {}\n NodeType node_type;\n node_id_t 
node_id;\n std::string node_name;\n std::string harbor_addr;\n uint16_t harbor_port;\n};\n\n\nstruct MasterNodeCap : public NodeCap {\n MasterNodeCap() {\n node_type = NodeType::Master;\n }\n};\n\n\nstruct GateNodeCap : public NodeCap {\n std::string backend_server; ///< backend server\n std::string listen_addr;\n uint16_t listen_port;\n\n GateNodeCap() {\n node_type = NodeType::Gate;\n }\n};\n\n\nstruct BackendNodeCap : public NodeCap {\n std::string entry_module;\n std::string init_args;\n\n BackendNodeCap() {\n node_type = NodeType::Backend;\n }\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_SERVER_CONFIG_H\n"
},
{
"alpha_fraction": 0.7196044325828552,
"alphanum_fraction": 0.7207679152488708,
"avg_line_length": 34.8125,
"blob_id": "d4133df61a9ca80c60ff5d5303aaf0aa58aeab10",
"content_id": "5612210a1fc0c247998eee34f8a73f0595a1d02c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1721,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 48,
"path": "/include/sparrow/kernel.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_KERNEL_H\n#define SPARROW_KERNEL_H\n\n#include \"sparrow_define.h\"\n#include \"i_kernel.h\"\n#include \"i_entity_creator.h\"\n\nNS_SPARROW_BEG\n\nclass Kernel : public IKernel {\npublic:\n DECL_ENTITY_CREATOR(Kernel)\n\n int ScheduleRpc(node_id_t node_id,\n uint32_t func,\n const VariantList& args,\n rpc_cb_t cb,\n void* ud) override;\n\n int Send(client_uuid_t cuuid, const void* data, size_t sz) override;\n\n void Trace(TraceLevel level, const char* text) override;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_KERNEL_H\n"
},
{
"alpha_fraction": 0.4484412372112274,
"alphanum_fraction": 0.4484412372112274,
"avg_line_length": 18.85714340209961,
"blob_id": "4522cf8edad1cfef0bb2b6ab674dcc9215e71969",
"content_id": "65611287d0b258de8b2861723b59a78c8fb311ca",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 417,
"license_type": "permissive",
"max_line_length": 68,
"num_lines": 21,
"path": "/build/gyp/lua.gyp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "{\n 'includes': [\n 'define.gypi'\n ],\n 'targets':[\n {\n 'target_name':'lua',\n 'type':'static_library',\n 'include_dirs':[\n '<(ROOT_DIR)/external/lua/src'\n ],\n 'sources':[\n '<!@(find <(ROOT_DIR)/external/lua/src -type f -name \"*.c\")'\n ],\n 'sources!':[\n '<(ROOT_DIR)/external/lua/src/lua.c',\n '<(ROOT_DIR)/external/lua/src/luac.c'\n ]\n }\n ]\n}\n"
},
{
"alpha_fraction": 0.701640784740448,
"alphanum_fraction": 0.7025043368339539,
"avg_line_length": 36.35483932495117,
"blob_id": "22d02ad2c1ace16b2c158e3616a9f4c8872f1362",
"content_id": "a45ba550fc4c2f8b3be02210d9656f1f433db221",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2318,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 62,
"path": "/include/sparrow/proto/command_define.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_PROTO_COMMAND_DEFINE_H\n#define SPARROW_PROTO_COMMAND_DEFINE_H\n\n#include \"../sparrow_define.h\"\n\nNS_SPARROW_BEG\n\n// inter-harbor message type\nenum {\n CMD_HARBORSYN = 1,\n CMD_HARBORACK,\n CMD_HARBORMESSAGE,\n CMD_HARBORBEAT,\n CMD_MAX\n};\n\n\n// harbor sub message type\nenum {\n HBM_MASTERHARBORREADY, ///< master -> slaves, when receive all slave harbor's syn\n HBM_GATEREADY, ///< gate -> master, when gate finish init, ready for open gate \n HBM_OPENGATE, ///< master <-> gate, notify gate server to start accept client connection\n HBM_CLIENTONLINE, ///< gate -> logic, client connect to gate\n HBM_CLIENTOFFLINE, ///< gate -> logic, client disconnect to gate\n HBM_CLIENTLOGICMSG, ///< gate -> logic, forward data to master\n HBM_TRANSCLIENTDATA, ///< * -> gate, forward data to client\n 
HBM_KICKOFFCLIENT, ///< shutdown the connection between client and gate\n HBM_RPCCALL, ///< rpc client -> rpc server\n HBM_RPCRET, ///< rpc server -> rpc client \n};\n\n\n// client message type\nenum {\n CMD_AUTH = 1,\n CMD_LOGIC\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_PROTO_COMMAND_DEFINE_H\n"
},
{
"alpha_fraction": 0.6864432692527771,
"alphanum_fraction": 0.6893124580383301,
"avg_line_length": 29.120370864868164,
"blob_id": "b48074f4275d13595169fcd78300eafb9f0b8a92",
"content_id": "629005b4373419cdcb8d648b598a874fd5d35def",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9761,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 324,
"path": "/src/sparrow/harbor.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <vector>\n\n#include <sparrow/harbor.h>\n#include <sparrow/entity_init.h>\n#include <sparrow/tcp_server.h>\n#include <sparrow/connector.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/proto/cluster.h>\n#include <sparrow/sparrow_application.h>\n#include <sparrow/event_define.h>\n#include <sparrow/event_emitter.h>\n#include <sparrow/event_proto.h>\n#include <sparrow/harbor_node.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(Harbor)\n\nHarbor::Harbor() {\n server_ = nullptr;\n node_id_ = INVALID_NODE_ID;\n}\n\n\nHarbor::~Harbor() {\n}\n\n\nint Harbor::ConnectHarborNet() {\n std::vector<node_id_t> harbors, connect;\n GetSlaveNodeIds(harbors);\n for (size_t i = 0; i < harbors.size(); ++i) {\n if (ShouldConnectHarbor(harbors[i])) {\n connect.push_back(harbors[i]);\n }\n }\n for 
(auto iter = connect.begin(); iter != connect.end(); ++iter) {\n const node_id_t node_id = *iter;\n ConnectHarbor(node_id);\n }\n return 0;\n}\n\n\nint Harbor::SendHarborMessage(node_id_t node_id, uint32_t command, \n VariantList&& args) {\n HarborNode* node = GetHarbor(node_id);\n if (!node) {\n Trace(TraceLevel::Error, \"SendHarborMessage: HarborNode not exist\");\n return -1;\n }\n HarborMessage harbor_message;\n harbor_message.message_type = command;\n harbor_message.message_data = std::move(args);\n return net::SendPacket(node->connid, harbor_message, GetInterClusterCodec());\n}\n\n\nuint32_t Harbor::GetNeighbourNodeCount() const {\n uint32_t count = 0;\n for (auto& iter : harbor_nodes_) {\n if (iter->connid != INVALID_CONNECTION_ID) { ++count; }\n }\n return count;\n}\n\n\nbool Harbor::IsNodeOnline(node_id_t node_id) const {\n HarborNode* node = GetHarbor(node_id);\n return node && node->connid != INVALID_CONNECTION_ID;\n}\n\n\nvoid Harbor::GetOnlineNodeIds(std::vector<node_id_t>& nodes) const {\n nodes.clear();\n for (auto& iter : harbor_nodes_) {\n if (iter->connid != INVALID_CONNECTION_ID) {\n nodes.push_back(iter->node_id);\n }\n }\n}\n\n\nbool Harbor::OnInit(const VariantList& args) {\n HarborInit cap;\n if (cap.Deserialize(args)) { return false; }\n TcpServerInit server_cap;\n server_cap.listen_addr = cap.listen_addr;\n server_cap.listen_port = cap.listen_port;\n node_id_ = cap.node_id;\n server_ = (TcpServer*)CreateEntity(\"TcpServer\", server_cap);\n if (!server_) { return false; }\n server_->SetConnectionCallback(&Harbor::OnHarborConnectIn, this);\n is_master_node_ = (INVALID_NODE_ID == cap.master_node_id);\n Trace(TraceLevel::Info, \"harbor started\");\n if (!is_master_node_) {\n ConnectHarbor(cap.master_node_id);\n }\n return true;\n}\n\n\nvoid Harbor::OnUninit() {\n if (server_) {\n server_->SetConnectionCallback(nullptr, nullptr);\n server_->Release();\n }\n}\n\n\nvoid Harbor::OnHarborConnectIn(void* ud, connection_id_t connid) {\n 
assert(INVALID_CONNECTION_ID != connid);\n auto self = (Harbor*)ud;\n HarborNode* node = new HarborNode(self, connid);\n node->SetMessageHandler(&Harbor::HandleHarborMessage, node);\n self->harbor_nodes_.insert(std::unique_ptr<HarborNode>(node));\n net::SetReadCallback(connid, &Harbor::OnHarborRead, self);\n net::SetCloseCallback(connid, &Harbor::OnHarborClosed, self);\n}\n\n\nvoid Harbor::OnHarborConnectOut(void* ud, connection_id_t connid) {\n HarborNode* node = (HarborNode*)ud;\n auto self = (Harbor*)node->ud;\n const node_id_t node_id = node->node_id;\n // remove corresponding connector\n auto iter = self->harbor_connectors_.find(node_id);\n assert(iter != self->harbor_connectors_.end());\n iter->second->SetConnectCallback(nullptr, nullptr);\n iter->second->Release();\n self->harbor_connectors_.erase(iter);\n // remove harbor when connect failed\n if (INVALID_CONNECTION_ID == connid) {\n Trace(TraceLevel::Error, \"connect harbor %d faild\", node->node_id);\n auto iter = self->GetHarborIter(node);\n assert(iter != self->harbor_nodes_.end());\n self->harbor_nodes_.erase(iter);\n return;\n }\n // connect peer harbor success\n node->connid = connid;\n net::SetReadCallback(connid, &Harbor::OnHarborRead, self);\n net::SetCloseCallback(connid, &Harbor::OnHarborClosed, self);\n HarborSyn syn;\n syn.node_id = GetSelfNodeId();\n self->SendHarborCommand(node, syn);\n}\n\n\nvoid Harbor::OnHarborRead(void* ud, connection_id_t connid, \n const void* data, int32_t sz) {\n Harbor* self = (Harbor*)ud;\n if (sz == 0) { return; }\n int result = -1;\n if (sz > 0) {\n HarborNode* node = self->GetHarbor(connid);\n assert(node);\n result = node->ProcessData(data, sz);\n }\n if (result) {\n net::Disconnect(connid);\n }\n}\n\n\nvoid Harbor::OnHarborClosed(void* ud, connection_id_t connid) {\n auto self = (Harbor*)ud;\n HarborNode* node = self->GetHarbor(connid);\n assert(node);\n EventHarborOffline e_harbor_offline;\n e_harbor_offline.node_id = node->node_id;\n 
GetEventEmitter()->Emit(EVENT_HARBOR_OFFLINE, e_harbor_offline);\n auto iter = self->GetHarborIter(node);\n assert(iter != self->harbor_nodes_.end());\n self->harbor_nodes_.erase(iter);\n}\n\n\nint Harbor::HandleHarborMessage(void* ud, uint16_t cmd,\n const VariantList& args) {\n HarborNode* node = (HarborNode*)ud;\n auto self = (Harbor*)node->ud;\n switch (cmd) {\n case CMD_HARBORSYN:\n self->HandleHarborSyn(node, args);\n break;\n case CMD_HARBORACK:\n self->HandleHarborAck(node, args);\n break;\n case CMD_HARBORMESSAGE:\n self->HandleHarborMessage(node, args);\n break;\n default:\n Trace(TraceLevel::Error, \"unknown harbor command %d\", cmd);\n break;\n }\n return 0;\n}\n\n\nint Harbor::SendHarborCommand(HarborNode* node, const ISerialize& command) {\n return net::SendPacket(node->connid, command, GetInterClusterCodec());\n}\n\n\nHarborNode* Harbor::GetHarbor(connection_id_t connid) const {\n for (auto& iter : harbor_nodes_) {\n if (iter->connid == connid) {\n return iter.get();\n }\n }\n return nullptr;\n}\n\n\nHarborNode* Harbor::GetHarbor(node_id_t node_id) const {\n for (auto& iter : harbor_nodes_) {\n if (iter->node_id == node_id) {\n return iter.get();\n }\n }\n return nullptr;\n}\n\n\nint Harbor::HandleHarborSyn(HarborNode* node, const VariantList& args) {\n HarborSyn sync;\n if (sync.Deserialize(args)) {\n Trace(TraceLevel::Error, \"HarborSyn decode failed\");\n assert(0);\n return -1;\n }\n Trace(TraceLevel::Info, \"receive [HarborSyn]\");\n node->node_id = sync.node_id;\n HarborAck ack;\n ack.node_id = GetSelfNodeId();\n SendHarborCommand(node, ack);\n EventHarborOnline e_harbor_online;\n e_harbor_online.node_id = node->node_id;\n GetEventEmitter()->Emit(EVENT_HARBOR_ONLINE, e_harbor_online);\n Trace(TraceLevel::Info, \"emit [EVENT_HARBOR_ONLINE]\");\n return 0;\n}\n\n\nint Harbor::HandleHarborAck(HarborNode* node, const VariantList& args) {\n HarborAck ack;\n if (ack.Deserialize(args)) {\n Trace(TraceLevel::Error, \"HarborAck decode failed\");\n 
assert(0);\n return -1;\n }\n Trace(TraceLevel::Info, \"receive [HarborAck]\");\n assert(node->node_id == ack.node_id);\n EventHarborOnline e_harbor_online;\n e_harbor_online.node_id = node->node_id;\n GetEventEmitter()->Emit(EVENT_HARBOR_ONLINE, e_harbor_online);\n Trace(TraceLevel::Info, \"emit [EVENT_HARBOR_ONLINE]\");\n return 0;\n}\n\n\nint Harbor::HandleHarborMessage(HarborNode* node, const VariantList& args) {\n HarborMessage harbor_message;\n int ret = harbor_message.Deserialize(args);\n if (ret) { \n Trace(TraceLevel::Error, \"HarborMessage decode failed\");\n assert(0);\n return -1; \n }\n Trace(TraceLevel::Info, \"receive [HarborMessage]\");\n EventHarborCommand e_harbor_command;\n e_harbor_command.source = node->node_id;\n e_harbor_command.args = std::move(harbor_message.message_data);\n auto sched = GetHarborCommandSched();\n sched->Emit(harbor_message.message_type, e_harbor_command);\n return 0;\n}\n\n\nint Harbor::ConnectHarbor(node_id_t node_id) {\n const NodeCap* server_cap = GetNodeCap(node_id);\n assert(server_cap);\n ConnectorInit connector_cap;\n connector_cap.peer_addr = server_cap->harbor_addr;\n connector_cap.peer_port = server_cap->harbor_port;\n Connector* connector;\n connector = (Connector*)CreateEntity(\"Connector\", connector_cap);\n if (connector) {\n harbor_connectors_.insert(std::make_pair(node_id, connector));\n HarborNode* node = new HarborNode(this, INVALID_CONNECTION_ID);\n node->node_id = node_id;\n node->SetMessageHandler(&Harbor::HandleHarborMessage, node);\n harbor_nodes_.insert(std::unique_ptr<HarborNode>(node));\n connector->SetConnectCallback(&Harbor::OnHarborConnectOut, node);\n } else {\n Trace(TraceLevel::Error, \"connector create failed, node id = %d\", node_id);\n }\n return 0;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6893448233604431,
"alphanum_fraction": 0.6965442895889282,
"avg_line_length": 34.16455841064453,
"blob_id": "ec759476d70f1cb6c3c239d41454b07f954a9c2f",
"content_id": "14a62ae5a16063f6cecb99011f7742466788a8ab",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2780,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 79,
"path": "/src/sparrow/stream_handler.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <sparrow/stream_handler.h>\n#include <sparrow/proto/data_codec.h>\n#include <sparrow/trace_log.h>\n\nNS_SPARROW_BEG\n\nStreamHandler::StreamHandler() {\n codec_ = nullptr;\n message_handler_ = nullptr;\n message_handler_ud_ = nullptr;\n}\n\n\nStreamHandler::~StreamHandler() {\n}\n\n\nint StreamHandler::ProcessData(const void* data, size_t sz) {\n const size_t old_sz = this->data_.size();\n this->data_.resize(old_sz + sz);\n memcpy(&this->data_[old_sz], data, sz);\n while (PendingSize() > 0) {\n size_t osize = 0, consumed_sz = 0;\n int result = codec_->Decode(&data_[0], data_.size(), nullptr, osize, consumed_sz);\n if (result == EWAITDATA) { return 0; }\n assert(result == EBUFOVERFLOW && osize > 0);\n std::unique_ptr<char[]> pack(new char[osize]);\n result = codec_->Decode(&data_[0], data_.size(), 
pack.get(), osize, consumed_sz);\n if (result) {\n Trace(TraceLevel::Error, \"wire data decode failed\");\n return -1;\n }\n VariantList vars;\n result = vars.Deserialize(pack.get(), osize);\n if (result) {\n Trace(TraceLevel::Error, \"data deserialize failed\");\n return -1;\n }\n const uint16_t cmd = vars.GetValue<uint16_t>(0);\n result = ProcessMessage(cmd, vars);\n data_.erase(data_.begin(), data_.begin() + consumed_sz);\n if (result) { return result; }\n }\n return 0;\n}\n\n\nint StreamHandler::ProcessMessage(uint16_t cmd, const VariantList& args) {\n return DoProcessMessage(cmd, args);\n}\n\n\nint StreamHandler::DoProcessMessage(uint16_t cmd, const VariantList& args) {\n if (message_handler_) { return message_handler_(message_handler_ud_, cmd, args); }\n return -1;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.7650471329689026,
"alphanum_fraction": 0.76577228307724,
"avg_line_length": 35.28947448730469,
"blob_id": "7b07cdc163fba2efc2d202e3190a4b9ec570cf19",
"content_id": "8f8fc77ef558ae1028fee3739221f0cb3d8872ad",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1379,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 38,
"path": "/include/sparrow/event_define.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_EVENT_DEFINE_H\n#define SPARROW_EVENT_DEFINE_H\n\n#include \"sparrow_define.h\"\n\nNS_SPARROW_BEG\n\nenum {\n EVENT_BEGIN = 1,\n EVENT_HARBOR_ONLINE = EVENT_BEGIN,\n EVENT_HARBOR_OFFLINE,\n EVENT_END\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_EVENT_DEFINE_H\n"
},
{
"alpha_fraction": 0.6405750513076782,
"alphanum_fraction": 0.644568681716919,
"avg_line_length": 21.35714340209961,
"blob_id": "2e164a9e2c2965d99c5e95729d18350cb234375e",
"content_id": "831ef0cc0bce413c6013914ed15abc713337d3ee",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1252,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 56,
"path": "/src/sparrow/trace_log.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "#include <string>\n#include <string.h>\n#include <sparrow/trace_log.h>\n\n#define ELPP_STL_LOGGING\n#define ELPP_THREAD_SAFE\n#include <easylogging++.h>\n\nINITIALIZE_EASYLOGGINGPP\n\nNS_SPARROW_BEG\n\nstatic TraceLevel TRACE_LEVEL = TraceLevel::Trace;\n\nvoid SetTraceLevel(TraceLevel level) {\n TRACE_LEVEL = level;\n}\n\n\nvoid Trace(TraceLevel level, const char* format, ...) {\n if (level < TRACE_LEVEL) { return; }\n char buffer[128];\n size_t sz = sizeof(buffer);\n char* new_buffer = buffer;\n va_list args;\n va_start(args, format);\n while (true) {\n const int result = vsnprintf(new_buffer, sz, format, args);\n if (result >= 0 && result < (int)sz) { break; }\n sz <<= 1;\n new_buffer = (char*)realloc(new_buffer != buffer ? new_buffer : nullptr, sz);\n }\n switch (level) {\n case TraceLevel::Trace:\n LOG(TRACE) << new_buffer;\n break;\n case TraceLevel::Debug:\n LOG(DEBUG) << new_buffer;\n break;\n case TraceLevel::Fatal:\n LOG(FATAL) << new_buffer;\n break;\n case TraceLevel::Error:\n LOG(ERROR) << new_buffer;\n break;\n case TraceLevel::Warning:\n LOG(WARNING) << new_buffer;\n break;\n case TraceLevel::Info:\n LOG(INFO) << new_buffer;\n break;\n }\n if (new_buffer != buffer) { free(new_buffer); }\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6562298536300659,
"alphanum_fraction": 0.6881859302520752,
"avg_line_length": 43.24285888671875,
"blob_id": "11258a2ed72d6fe4d8c353736b98a19cb576e59c",
"content_id": "5bb37d18f14c92404c15eb0a7598ebd6ec41d1ec",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3098,
"license_type": "permissive",
"max_line_length": 101,
"num_lines": 70,
"path": "/bin/chat_client/ui_chat.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'F:\\projects\\sparrow\\bin\\chat_client\\ui_chat.ui'\n#\n# Created by: PyQt4 UI code generator 4.11.4\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt4 import QtCore, QtGui\n\ntry:\n _fromUtf8 = QtCore.QString.fromUtf8\nexcept AttributeError:\n def _fromUtf8(s):\n return s\n\ntry:\n _encoding = QtGui.QApplication.UnicodeUTF8\n def _translate(context, text, disambig):\n return QtGui.QApplication.translate(context, text, disambig, _encoding)\nexcept AttributeError:\n def _translate(context, text, disambig):\n return QtGui.QApplication.translate(context, text, disambig)\n\nclass Ui_Chat(object):\n def setupUi(self, Chat):\n Chat.setObjectName(_fromUtf8(\"Chat\"))\n Chat.setWindowModality(QtCore.Qt.NonModal)\n Chat.resize(597, 362)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Chat.sizePolicy().hasHeightForWidth())\n Chat.setSizePolicy(sizePolicy)\n font = QtGui.QFont()\n font.setFamily(_fromUtf8(\"Consolas\"))\n font.setPointSize(10)\n font.setBold(False)\n font.setWeight(50)\n Chat.setFont(font)\n self.label = QtGui.QLabel(Chat)\n self.label.setGeometry(QtCore.QRect(20, 20, 35, 13))\n self.label.setObjectName(_fromUtf8(\"label\"))\n self.lineEdit_gateaddr = QtGui.QLineEdit(Chat)\n self.lineEdit_gateaddr.setGeometry(QtCore.QRect(60, 20, 211, 20))\n self.lineEdit_gateaddr.setObjectName(_fromUtf8(\"lineEdit_gateaddr\"))\n self.textBrowser_messages = QtGui.QTextBrowser(Chat)\n self.textBrowser_messages.setGeometry(QtCore.QRect(20, 60, 421, 251))\n self.textBrowser_messages.setObjectName(_fromUtf8(\"textBrowser_messages\"))\n self.listView_users = QtGui.QListView(Chat)\n self.listView_users.setGeometry(QtCore.QRect(450, 60, 141, 251))\n self.listView_users.setObjectName(_fromUtf8(\"listView_users\"))\n self.lineEdit_message 
= QtGui.QLineEdit(Chat)\n self.lineEdit_message.setGeometry(QtCore.QRect(20, 330, 421, 20))\n self.lineEdit_message.setObjectName(_fromUtf8(\"lineEdit_message\"))\n self.button_send = QtGui.QPushButton(Chat)\n self.button_send.setGeometry(QtCore.QRect(480, 330, 75, 23))\n self.button_send.setObjectName(_fromUtf8(\"button_send\"))\n self.button_connect = QtGui.QPushButton(Chat)\n self.button_connect.setGeometry(QtCore.QRect(290, 20, 75, 23))\n self.button_connect.setObjectName(_fromUtf8(\"button_connect\"))\n\n self.retranslateUi(Chat)\n QtCore.QMetaObject.connectSlotsByName(Chat)\n\n def retranslateUi(self, Chat):\n Chat.setWindowTitle(_translate(\"Chat\", \"Chat Client\", None))\n self.label.setText(_translate(\"Chat\", \"Gate:\", None))\n self.button_send.setText(_translate(\"Chat\", \"Send\", None))\n self.button_connect.setText(_translate(\"Chat\", \"Connect\", None))\n\n"
},
{
"alpha_fraction": 0.6407835483551025,
"alphanum_fraction": 0.6445598006248474,
"avg_line_length": 29.9270076751709,
"blob_id": "6cee6f6f5b1b763b037f5a4ca20e86b50b74e827",
"content_id": "90b336d768bdb33e9bdac66feca98396c17cf269",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4237,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 137,
"path": "/bin/chat_client/proto.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "# Copyright http://www.gokulab.com. All rights reserved.\n\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to\n# deal in the Software without restriction, including without limitation the\n# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n# sell copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n# IN THE SOFTWARE.\n\nimport struct\nfrom variant import VariantList\n\nclass DataCodec:\n def __init__(self):\n pass\n def Encode(self, data):\n pass\n def Decode(self, data):\n pass\n\nclass DataCodecPrefixHeader(DataCodec):\n def __init__(self):\n DataCodec.__init__(self)\n \n def Encode(self, data):\n new_data = struct.pack(\"<H\", 2 + len(data))\n new_data += data\n return new_data\n \n def Decode(self, data):\n if len(data) < 2:\n raise Exception(\"data too short\")\n pack_sz, = struct.unpack(\"<H\", data[0:2])\n if len(data) < pack_sz:\n raise Exception(\"data too short\")\n return (data[2:], pack_sz)\n\n\nclass CommonHeader:\n def __init__(self):\n self.command = 0\n \n def Serialize(self, varlist):\n varlist.addUInt16(self.command)\n \n def Deserialize(self, varlist):\n self.command = varlist.getValue(0)\n\n\nCMD_AUTH = 1\nCMD_LOGIC = 2\nCHAT_CMD_CHAT = 
100\n \n \nclass AuthRequest(CommonHeader):\n def __init__(self):\n CommonHeader.__init__(self)\n self.command = CMD_AUTH\n self.uname = \"\"\n self.passwd = \"\"\n \n def Serialize(self, varlist):\n assert(isinstance(varlist, VariantList))\n CommonHeader.Serialize(self, varlist)\n varlist.addString(self.uname) \n varlist.addString(self.passwd)\n\n\nclass LogicRequest(CommonHeader):\n def __init__(self):\n CommonHeader.__init__(self)\n self.command = CMD_LOGIC\n self.message = None\n \n def Serialize(self, varlist):\n assert(isinstance(varlist, VariantList))\n assert(self.message != None)\n CommonHeader.Serialize(self, varlist)\n assert(isinstance(self.message, ChatCommand))\n var = VariantList()\n self.message.Serialize(var)\n varlist.addBlock(var.Serialize())\n \n \n# client -> gate\nclass ChatCommand(CommonHeader):\n def __init__(self):\n CommonHeader.__init__(self)\n self.command = CHAT_CMD_CHAT\n self.text = ''\n \n def Serialize(self, varlist):\n assert(isinstance(varlist, VariantList))\n CommonHeader.Serialize(self, varlist)\n varlist.addString(self.text)\n \n def Deserialize(self, varlist):\n CommonHeader.Deserialize(self, varlist);\n self.text = varlist.getValue(1)\n \n \n# gate -> client\nclass ChatMessage(CommonHeader):\n def __init__(self):\n CommonHeader.__init__(self)\n self.command = CHAT_CMD_CHAT\n self.message = ''\n \n def Serialize(self, varlist):\n assert(isinstance(varlist, VariantList))\n CommonHeader.Serialize(self, varlist)\n varlist.addString(self.message)\n \n def Deserialize(self, varlist):\n CommonHeader.Deserialize(self, varlist);\n self.message = varlist.getValue(1)\n \n\n# util funcion\ndef SendToGate(sock, logic_req):\n assert(isinstance(logic_req, LogicRequest))\n var = VariantList()\n logic_req.Serialize(var)\n codec = DataCodecPrefixHeader()\n data = codec.Encode(var.Serialize())\n sock.send(data)\n"
},
{
"alpha_fraction": 0.6811212301254272,
"alphanum_fraction": 0.6827982664108276,
"avg_line_length": 30.621212005615234,
"blob_id": "ef65f54661a11e038477c0df21620ff06f40c4d2",
"content_id": "17986a47d6170b3795367c811626876ad115f257",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4176,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 132,
"path": "/src/sparrow/connection_pool.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <sparrow/connection_pool.h>\n#include <sparrow/entity_init.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/tcp_connection.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(ConnectionPool)\n\nbool ConnectionPool::OnInit(const VariantList& args) {\n next_connection_id_ = 0;\n return true;\n}\n\n\nvoid ConnectionPool::OnUninit() {\n close_cb_.clear();\n for (auto& iter : connections_) {\n TcpConnection* connection = iter.second;\n connection->SetReadCallback(nullptr, nullptr);\n connection->SetCloseCallback(nullptr, nullptr);\n connection->Release();\n }\n connections_.clear();\n}\n\n\nconnection_id_t ConnectionPool::AddConnection(uv_tcp_t* handle) {\n TcpConnectionInit init;\n init.connid = AllocConnectionId();\n init.handle = handle;\n TcpConnection* connection;\n connection = 
(TcpConnection*)CreateEntity(\"TcpConnection\", init);\n if (!connection) { return INVALID_CONNECTION_ID; }\n // we should hook the close event\n connection->SetCloseCallback(&ConnectionPool::OnConnectionClose, this);\n connections_.insert(std::make_pair(init.connid, connection));\n return init.connid;\n}\n\n\nint ConnectionPool::SetReadCallback(connection_id_t connid, \n net::read_cb_t cb, \n void* ud) {\n TcpConnection* connection = GetConnectionOfId(connid);\n if (!connection) {\n return -1;\n }\n connection->SetReadCallback(cb, ud);\n return 0;\n}\n\n\nint ConnectionPool::SetCloseCallback(connection_id_t connid, \n net::close_cb_t cb, \n void* ud) {\n TcpConnection* connection = GetConnectionOfId(connid);\n if (!connection) {\n return -1;\n }\n if (cb) {\n // set callback\n close_cb_[connid] = std::make_tuple(cb, ud);\n } else {\n // remove callback\n auto iter = close_cb_.find(connid);\n if (iter != close_cb_.end()) {\n close_cb_.erase(iter);\n }\n }\n return 0;\n}\n\n\nvoid ConnectionPool::OnConnectionClose(void* ud, connection_id_t connid) {\n ConnectionPool* self = (ConnectionPool*)ud;\n auto iter = self->close_cb_.find(connid);\n if (iter != self->close_cb_.end()) {\n net::close_cb_t cb;\n void* cb_ud;\n std::tie(cb, cb_ud) = iter->second;\n self->close_cb_.erase(iter);\n cb(cb_ud, connid);\n }\n auto iter_conn = self->connections_.find(connid);\n assert(iter_conn != self->connections_.end());\n TcpConnection* connection = iter_conn->second;\n connection->Release();\n self->connections_.erase(iter_conn);\n}\n\n\nint ConnectionPool::SendPacket(connection_id_t connid, const void* data, size_t sz) {\n TcpConnection* connection = GetConnectionOfId(connid);\n return connection ? 
connection->SendPacket(data, sz) : -1;\n}\n\n\nint ConnectionPool::Disconnect(connection_id_t connid, bool notify) {\n if (!notify) {\n auto iter = close_cb_.find(connid);\n if (iter != close_cb_.end()) {\n close_cb_.erase(iter);\n }\n }\n TcpConnection* connection = GetConnectionOfId(connid);\n if (!connection) { return -1; }\n return connection->Disconnect();\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.7149486541748047,
"alphanum_fraction": 0.7149486541748047,
"avg_line_length": 32.522220611572266,
"blob_id": "5452794ca39e384804b5a62a6ee0e8c5ee6f8a9d",
"content_id": "b6af94c4e6b192eadbeecc019a81f1eefdf0390e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3019,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 90,
"path": "/include/sparrow/connection_pool.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_CONNECTION_POOL_H\n#define SPARROW_CONNECTION_POOL_H\n\n#include <map>\n#include <tuple>\n#include <uv.h>\n\n#include \"sparrow_define.h\"\n#include \"entity.h\"\n#include \"entity_factory.h\"\n#include \"sparrow_net.h\"\n#include \"trace_log.h\"\n\nNS_SPARROW_BEG\n\nclass TcpConnection;\n\nclass ConnectionPool : public Entity {\npublic:\n DECL_ENTITY_CREATOR(ConnectionPool)\n\n /**\n * @param handle alloc on the heap, when success, you should give up ownership\n */\n connection_id_t AddConnection(uv_tcp_t* handle);\n int SetReadCallback(connection_id_t connid, net::read_cb_t cb, void* ud);\n int SetCloseCallback(connection_id_t connid, net::close_cb_t cb, void* ud);\n\n int SendPacket(connection_id_t connid, const void* data, size_t sz);\n int Disconnect(connection_id_t connid, bool notify = 
true);\n\nprotected:\n bool OnInit(const VariantList& args) override;\n void OnUninit() override;\n\nprivate:\n static void OnConnectionClose(void* ud, connection_id_t connid);\n\n connection_id_t AllocConnectionId() {\n const connection_id_t begin = next_connection_id_;\n while (true) {\n connection_id_t id = ++next_connection_id_;\n if (id == begin) {\n Trace(TraceLevel::Info, \"connection id resource warning\");\n }\n if (id == INVALID_CONNECTION_ID) { continue; }\n auto iter = connections_.find(id);\n if (iter == connections_.end()) {\n return id;\n }\n }\n }\n\n TcpConnection* GetConnectionOfId(connection_id_t connid) const {\n auto iter = connections_.find(connid);\n if (iter == connections_.end()) { return nullptr; }\n return iter->second;\n }\n\nprivate:\n connection_id_t next_connection_id_;\n std::map<connection_id_t, TcpConnection*> connections_;\n typedef std::tuple<net::close_cb_t, void*> close_cb_info_t;\n std::map<connection_id_t, close_cb_info_t> close_cb_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_CONNECTION_POOL_H\n"
},
{
"alpha_fraction": 0.6610284447669983,
"alphanum_fraction": 0.6623815894126892,
"avg_line_length": 27.980392456054688,
"blob_id": "afee6c557f7cf864a18263e2557bb486d8a58ccf",
"content_id": "8ab1ba9e502604fcc03795c8141df39349bd423f",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4434,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 153,
"path": "/src/sparrow/tcp_connection.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#include <assert.h>\n\n#include <sparrow/tcp_connection.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/tcp_server.h>\n#include <sparrow/entity_init.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(TcpConnection)\n\nTcpConnection::TcpConnection() {\n handle_ = nullptr;\n read_cb_ = nullptr;\n read_cb_ud_ = nullptr;\n close_cb_ = nullptr;\n close_cb_ud_ = nullptr;\n}\n\n\nTcpConnection::~TcpConnection() {\n}\n\n\nbool TcpConnection::OnInit(const VariantList& args) {\n TcpConnectionInit cap;\n if (cap.Deserialize(args)) {\n return false;\n }\n handle_ = cap.handle;\n handle_->data = this;\n connid_ = cap.connid;\n int result;\n result = uv_read_start((uv_stream_t*)handle_, OnAlloc, OnRead);\n if (result) {\n Trace(TraceLevel::Error, \"uv_read_start failed: %d\", result);\n uv_close((uv_handle_t*)handle_, OnClosed);\n handle_ = 
nullptr;\n return false;\n }\n return true;\n}\n\n\nvoid TcpConnection::OnUninit() {\n if (handle_) {\n handle_->data = nullptr;\n Disconnect();\n }\n if (!send_queue_.empty()) { send_queue_.pop(); }\n while (!send_queue_.empty()) {\n data_blob_t* block = send_queue_.front();\n send_queue_.pop();\n delete block;\n }\n}\n\n\nint TcpConnection::SendPacket(const void* data, size_t sz) {\n data_blob_t* block = new data_blob_t(data, sz);\n const bool empty = send_queue_.empty();\n send_queue_.push(block);\n if (empty) {\n uv_write_t *req = new uv_write_t(); // must keep valid util written finish\n req->data = block;\n uv_buf_t buf = uv_buf_init((char*)block->data, block->sz);\n uv_write(req, (uv_stream_t*)handle_, &buf, 1, &TcpConnection::OnWritten);\n }\n return 0;\n}\n\n\nint TcpConnection::Disconnect() {\n assert(handle_);\n if (!handle_) { return -1; }\n uv_read_stop((uv_stream_t*)handle_);\n uv_close((uv_handle_t*)handle_, &TcpConnection::OnClosed);\n return 0;\n}\n\n\nvoid TcpConnection::OnAlloc(uv_handle_t* handle,\n size_t suggested_size,\n uv_buf_t* buf) {\n buf->base = (char*)malloc(suggested_size);\n buf->len = buf->base ? 
suggested_size : 0;\n}\n\n\nvoid TcpConnection::OnRead(uv_stream_t* handle, ssize_t nread, const uv_buf_t* buf) {\n TcpConnection* self = (TcpConnection*)handle->data;\n if (self && self->read_cb_) {\n self->read_cb_(self->read_cb_ud_, self->connid_, buf->base, nread);\n }\n free(buf->base);\n}\n\n\nvoid TcpConnection::OnWritten(uv_write_t* req, int status) {\n TcpConnection* self = (TcpConnection*)req->handle->data;\n data_blob_t* block = (data_blob_t*)req->data;\n delete block;\n delete req;\n if (status) {\n Trace(TraceLevel::Error, \"write failed: status = %d\", status);\n }\n if (self) {\n assert(block == self->send_queue_.front());\n self->send_queue_.pop();\n if (!self->send_queue_.empty()) {\n block = self->send_queue_.front();\n uv_buf_t buf = uv_buf_init((char*)block->data, block->sz);\n req = new uv_write_t();\n req->data = block;\n uv_write(req, (uv_stream_t*)self->handle_, &buf, 1, &TcpConnection::OnWritten);\n }\n }\n}\n\n\nvoid TcpConnection::OnClosed(uv_handle_t* handle) {\n TcpConnection* self = (TcpConnection*)handle->data;\n if (self) {\n self->handle_ = nullptr; // reset this before invoke callback.\n if (self->close_cb_) {\n self->close_cb_(self->close_cb_ud_, self->connid_);\n }\n }\n delete handle;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.7105262875556946,
"alphanum_fraction": 0.7368420958518982,
"avg_line_length": 11.666666984558105,
"blob_id": "4c5de72947a880147f463d5fde42d55cbbd30591",
"content_id": "564f944cc22660d971caf61c833c51da6c576f3e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 38,
"license_type": "permissive",
"max_line_length": 26,
"num_lines": 3,
"path": "/bin/start_chat1.py",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "import os\n\nos.system(\"sparrow chat1\")\n"
},
{
"alpha_fraction": 0.657194972038269,
"alphanum_fraction": 0.6592804789543152,
"avg_line_length": 29.44444465637207,
"blob_id": "c0660f05242bcc42eebc047b60221607a4242cca",
"content_id": "b9955af058d5dd8c1ba02f22f8d44f4f0a6e40e6",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3838,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 126,
"path": "/src/sparrow/connector.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <cstdint>\n#include <assert.h>\n#include <string.h>\n\n#include <sparrow/connector.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/entity_init.h>\n#include <sparrow/tcp_connection.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(Connector)\n\nstruct connect_req_t {\n connect_req_t(const std::string addr, uint16_t port) {\n memset(this->addr, 0, sizeof(this->addr));\n assert(addr.size() < sizeof(this->addr));\n strcpy(this->addr, addr.c_str());\n this->port = port;\n }\n uv_connect_t req;\n char addr[32];\n uint16_t port;\n};\n\n\nbool Connector::OnInit(const VariantList& args) {\n handle_ = nullptr;\n connect_cb_ = nullptr;\n connect_cb_ud_ = nullptr;\n do {\n ConnectorInit cap;\n if (cap.Deserialize(args)) { break; }\n struct sockaddr_in peer;\n if 
(uv_ip4_addr(cap.peer_addr.c_str(), cap.peer_port, &peer)) {\n break;\n }\n handle_ = new uv_tcp_t();\n handle_->data = this;\n uv_tcp_init(uv_default_loop(), handle_);\n connect_req_t* req = new connect_req_t(cap.peer_addr, cap.peer_port);\n int ret = uv_tcp_connect(&req->req, handle_, (sockaddr*)&peer, \n &Connector::OnConnect);\n if (ret) {\n Trace(TraceLevel::Error, \"connect failed %s:%d\", req->addr, req->port);\n uv_close((uv_handle_t*)handle_, &Connector::OnClose);\n delete req;\n break;\n }\n return true;\n } while (false);\n return false;\n}\n\n\nvoid Connector::OnUninit() {\n if (handle_) {\n handle_->data = nullptr;\n uv_close((uv_handle_t*)handle_, &Connector::OnClose);\n handle_ = nullptr;\n }\n}\n\n\nvoid Connector::OnConnect(uv_connect_t* req, int status) {\n Connector* self = (Connector*)req->handle->data;\n connect_req_t *conn_req = (connect_req_t*)req;\n const char* addr = conn_req->addr;\n const int port = conn_req->port;\n Trace(TraceLevel::Info, \"connect to [%s:%d] status = %d\", addr, port, status);\n connection_id_t connid = INVALID_CONNECTION_ID;\n bool close = true;\n do {\n if (status || !self || !self->connect_cb_) {\n break; \n }\n connid = GetConnectionPool()->AddConnection(self->handle_);\n if (INVALID_CONNECTION_ID == connid) {\n break;\n }\n close = false;\n self->handle_ = nullptr;\n } while (false);\n if (close) {\n if (self) {\n self->handle_->data = nullptr;\n self->handle_ = nullptr; \n }\n uv_close((uv_handle_t*)req->handle, &Connector::OnClose);\n }\n if (self && self->connect_cb_) {\n self->connect_cb_(self->connect_cb_ud_, connid);\n }\n delete conn_req;\n}\n\n\nvoid Connector::OnClose(uv_handle_t* handle) {\n auto self = (Connector*)handle->data;\n if (self) { self->handle_ = nullptr; }\n delete (uv_tcp_t*)handle;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.5822187066078186,
"alphanum_fraction": 0.5848413109779358,
"avg_line_length": 28.106870651245117,
"blob_id": "b2456211f42f65d79fc6a93dfd905dfe12294da4",
"content_id": "a465ef3963866e87763f775c885e148fd19d8f83",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3815,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 131,
"path": "/include/sparrow/utils/circle_buffer.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <assert.h>\n#include <memory>\n#include \"../sparrow_define.h\"\n\nNS_SPARROW_BEG\n\nclass CircleBuffer {\npublic:\n CircleBuffer(size_t sz) {\n assert(sz > 0);\n buffer_sz_ = sz;\n rd_pos_ = wt_pos_ = 0;\n buffer_.reset(new char[buffer_sz_]);\n }\n\n bool Empty() const { return rd_pos_ == wt_pos_; }\n\n size_t GetReadableSize() const {\n if (rd_pos_ <= wt_pos_) {\n return wt_pos_ - rd_pos_;\n } else {\n return buffer_sz_ - rd_pos_ + wt_pos_;\n }\n }\n\n size_t GetConsecutiveReadableSize() const {\n if (wt_pos_ < rd_pos_) {\n return buffer_sz_ - rd_pos_;\n } else {\n return wt_pos_ - rd_pos_;\n }\n }\n\n void Read(void* buffer, size_t sz) {\n if (rd_pos_ <= wt_pos_) {\n const size_t readable_sz = wt_pos_ - rd_pos_;\n assert(sz <= readable_sz);\n memcpy(buffer, buffer_.get() + rd_pos_, sz);\n rd_pos_ += 
sz;\n } else {\n const size_t fst_blk_sz = buffer_sz_ - rd_pos_;\n const size_t snd_blk_sz = wt_pos_;\n assert(sz <= fst_blk_sz + snd_blk_sz);\n memcpy(buffer, buffer_.get() + rd_pos_, (std::min)(fst_blk_sz, sz));\n if (sz > fst_blk_sz) {\n sz -= fst_blk_sz;\n memcpy((char*)buffer + fst_blk_sz, buffer_.get(), sz);\n rd_pos_ = sz;\n } else {\n rd_pos_ += sz;\n if (rd_pos_ == buffer_sz_) {\n rd_pos_ = 0;\n }\n }\n }\n }\n\n size_t GetWritableSize() const {\n if (rd_pos_ <= wt_pos_) {\n return buffer_sz_ - wt_pos_ + rd_pos_ - 1;\n } else {\n return rd_pos_ - wt_pos_ - 1;\n }\n }\n\n size_t GetConsecutiveWritableSize() const {\n if (rd_pos_ <= wt_pos_) {\n return (std::min)(buffer_sz_ - wt_pos_, buffer_sz_ - 1);\n } else if (wt_pos_ < rd_pos_) {\n return rd_pos_ - wt_pos_ - 1;\n }\n }\n\n void Write(const void* buffer, size_t sz) {\n if (rd_pos_ <= wt_pos_) {\n const size_t fst_blk_sz = buffer_sz_ - wt_pos_;\n const size_t snd_blk_sz = rd_pos_ - 1;\n memcpy(buffer_.get() + wt_pos_, buffer, (std::min)(fst_blk_sz, sz));\n if (sz > fst_blk_sz) {\n sz -= fst_blk_sz;\n memcpy(buffer_.get(), (char*)buffer + fst_blk_sz, sz);\n wt_pos_ = sz;\n } else {\n wt_pos_ += sz;\n if (wt_pos_ == buffer_sz_) {\n wt_pos_ = 0;\n }\n }\n } else {\n const size_t blk_sz = rd_pos_ - wt_pos_ - 1;\n assert(sz <= blk_sz);\n }\n }\n\n void* GetGetReadBuffer() {\n return buffer_.get() + rd_pos_;\n }\n\n void* GetGetWriteBuffer() {\n return buffer_.get() + wt_pos_;\n }\n\nprivate:\n std::unique_ptr<char[]> buffer_;\n size_t buffer_sz_;\n size_t rd_pos_; // next read position\n size_t wt_pos_; // next write position\n};\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6841703057289124,
"alphanum_fraction": 0.6867193579673767,
"avg_line_length": 31.966386795043945,
"blob_id": "ed0c8601eafeb0c726344acc5436ecde3fcc76a3",
"content_id": "32020b22ddc7702a45b8919e1335f95723f14d94",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3925,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 119,
"path": "/src/sparrow/sparrow_net.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <memory>\n\n#include <sparrow/sparrow_net.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/proto/cluster.h>\n#include <sparrow/harbor.h>\n#include <sparrow/sparrow_define.h>\n#include <sparrow/sparrow_application.h>\n#include <sparrow/proto/cluster.h>\n\nNS_SPARROW_BEG\n\nnamespace net {\n\n int SetReadCallback(connection_id_t connid, read_cb_t cb, void* ud) {\n return GetConnectionPool()->SetReadCallback(connid, cb, ud);\n }\n\n\n int SetCloseCallback(connection_id_t connid, close_cb_t cb, void* ud) {\n return GetConnectionPool()->SetCloseCallback(connid, cb, ud);\n }\n\n\n int SendPacket(connection_id_t connid, const void* data, size_t sz,\n IDataCodec* codec) {\n if (codec) {\n size_t osize = 0;\n codec->Encode(data, sz, nullptr, osize);\n assert(osize > 0);\n std::unique_ptr<char[]> new_data(new 
char[osize]);\n int result = codec->Encode(data, sz, new_data.get(), osize);\n assert(result == 0);\n return GetConnectionPool()->SendPacket(connid, new_data.get(), osize);\n } else {\n return GetConnectionPool()->SendPacket(connid, data, sz);\n }\n }\n\n\n int SendPacket(connection_id_t connid, const ISerialize& stream,\n IDataCodec* codec) {\n VariantList vars;\n if (stream.Serialize(vars)) { return -1; }\n const size_t sz = vars.GetByteSize();\n if (!sz) { return -1; }\n std::unique_ptr<char[]> data(new char[sz]);\n vars.Serialize(data.get(), sz);\n return SendPacket(connid, data.get(), sz, codec);\n }\n\n\n int Disconnect(connection_id_t connid) {\n return GetConnectionPool()->Disconnect(connid);\n }\n\n} // end of namespace net\n\n\nstatic int SendHarborCommand(node_id_t node_id,\n uint32_t command,\n VariantList&& args) {\n Harbor* harbor = app->GetGlobalEntity<Harbor>(GLOBAL_ENT_HARBOR);\n if (!harbor) { return -1; }\n return harbor->SendHarborMessage(node_id, command, std::move(args));\n}\n\n\nint SendHarborCommand(node_id_t node_id, const HarborCommand& message) {\n VariantList var;\n int ret = message.Serialize(var);\n assert(!ret);\n if (ret) { return ret; }\n return SendHarborCommand(node_id, message.command, std::move(var));\n}\n\n\nint SendMasterCommand(const HarborCommand& message) {\n return SendHarborCommand(GetMasterNodeId(), message);\n}\n\n\nint SendClientCommand(client_uuid_t cuuid, const void* data, size_t sz) {\n TransClientData trans(cuuid, data, sz);\n return SendHarborCommand(cuuid.gate_id, trans);\n}\n\n\nint ScheduleRpc(node_id_t node_id,\n uint32_t func,\n const VariantList& args,\n rpc_cb_t cb, void* ud) {\n auto backend_server = GetBackendServer();\n assert(backend_server);\n return backend_server->ScheduleRpc(node_id, func, args, cb, ud);\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6803455948829651,
"alphanum_fraction": 0.6838012933731079,
"avg_line_length": 36.33871078491211,
"blob_id": "9c3499667eee107070176108fdc79e1ea8cf1a19",
"content_id": "1955e17cbb789cdbd97fa0fc9c7994ec1791ba6f",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2317,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 62,
"path": "/src/sparrow/crash_dump_win.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <assert.h>\n#include <windows.h>\n#include <time.h>\n#include <DbgHelp.h>\n#include <sparrow/crash_dump.h>\n\n#pragma comment(lib, \"DbgHelp.lib\")\n\nNS_SPARROW_BEG\n\nstatic LONG WINAPI OnException(_EXCEPTION_POINTERS *exception_pointers) {\n time_t now;\n time(&now);\n tm* tm = localtime(&now);\n char fname[256] = { 0 };\n strftime(fname, 80, \"%Y%m%d%H%M%S.dmp\", tm);\n HANDLE handle = ::CreateFileA(fname, GENERIC_WRITE, 0, NULL,\n CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);\n if (handle != INVALID_HANDLE_VALUE) {\n MINIDUMP_EXCEPTION_INFORMATION info;\n info.ThreadId = GetCurrentThreadId();\n info.ExceptionPointers = exception_pointers;\n info.ClientPointers = FALSE;\n BOOL ret = MiniDumpWriteDump(GetCurrentProcess(), \n GetCurrentProcessId(), \n handle, \n MiniDumpWithFullMemoryInfo, \n &info, NULL, 
NULL);\n assert(ret == TRUE);\n CloseHandle(handle);\n }\n return EXCEPTION_EXECUTE_HANDLER;\n}\n\n\nint crash_dump_init() {\n SetUnhandledExceptionFilter(OnException);\n return 0;\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.7058495879173279,
"alphanum_fraction": 0.7069637775421143,
"avg_line_length": 34.900001525878906,
"blob_id": "f38ad76295eb52c27b399d89a013dbffe92e0c52",
"content_id": "1ef83a30e2a63a2b5ea23b172fce102956488091",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1797,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 50,
"path": "/src/sparrow/kernel.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <assert.h>\n\n#include <sparrow/kernel.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/sparrow_net.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(Kernel)\n\nint Kernel::ScheduleRpc(node_id_t node_id,\n uint32_t func,\n const VariantList& args,\n rpc_cb_t cb,\n void* ud) {\n return goku::ScheduleRpc(node_id, func, args, cb, ud);\n}\n\n\nint Kernel::Send(client_uuid_t cuuid, const void* data, size_t sz) {\n return goku::SendClientCommand(cuuid, data, sz);\n}\n\n\nvoid Kernel::Trace(TraceLevel level, const char* text) {\n return goku::Trace(level, text);\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.6910094022750854,
"alphanum_fraction": 0.692307710647583,
"avg_line_length": 33.04419708251953,
"blob_id": "e0e55d26ca8d5fed8ef67ba1d061cc689376491d",
"content_id": "e651b06574278df32c989778de278513c9d8b28b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6164,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 181,
"path": "/src/sparrow/master_server.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <cstdint>\n#include <string>\n#include <time.h>\n#include <vector>\n#include <string.h>\n#include <assert.h>\n\n#include <sparrow/master_server.h>\n#include <sparrow/sparrow.h>\n#include <sparrow/sparrow_errno.h>\n#include <sparrow/proto/data_codec.h>\n#include <sparrow/proto/cluster.h>\n#include <sparrow/trace_log.h>\n#include <sparrow/sparrow_application.h>\n#include <sparrow/event_define.h>\n#include <sparrow/event_proto.h>\n#include <sparrow/sparrow_conf.h>\n\nNS_SPARROW_BEG\n\nIMPL_ENTITY_CREATOR(MasterServer)\n\nMasterServer::MasterServer() {\n}\n\n\nMasterServer::~MasterServer() {\n}\n\n\nbool MasterServer::OnInit(const VariantList& args) {\n AddEventListener(EVENT_HARBOR_ONLINE, &MasterServer::OnEventHarborOnline, this);\n AddEventListener(EVENT_HARBOR_OFFLINE, &MasterServer::OnEventHarborOffline, 
this);\n AddHarborCommandListener(HBM_GATEREADY, &MasterServer::OnCommandGateReady, this);\n AddHarborCommandListener(HBM_OPENGATE, &MasterServer::OnCommandOpenGateResponse, this);\n return true;\n}\n\n\nvoid MasterServer::OnUninit() {\n RemoveEventListener(EVENT_HARBOR_ONLINE, &MasterServer::OnEventHarborOnline);\n RemoveEventListener(EVENT_HARBOR_OFFLINE, &MasterServer::OnEventHarborOffline);\n RemoveHarborCommandListener(HBM_GATEREADY, &MasterServer::OnCommandGateReady);\n RemoveHarborCommandListener(HBM_OPENGATE, &MasterServer::OnCommandOpenGateResponse);\n}\n\n\nvoid MasterServer::OnEventHarborOnline(void* ud, uint32_t signal,\n const VariantList& args) {\n auto self = (MasterServer*)ud;\n EventHarborOnline e_harbor_online;\n int ret = e_harbor_online.Deserialize(args);\n assert(!ret);\n const node_id_t node_id = e_harbor_online.node_id;\n const std::string node_name = GetNodeName(node_id);\n Trace(TraceLevel::Info, \"harbor node online %s\", node_name.c_str());\n std::unique_ptr<SlaveNode> node(new SlaveNode(node_id));\n self->slaves_.insert(std::make_pair(node_id, std::move(node)));\n if (self->IsClustersStartUp()) {\n Trace(TraceLevel::Info, \"all cluster server has start up\");\n Trace(TraceLevel::Info, \"notify to build harbor net\");\n MasterHarborReady notify;\n self->Broadcast(notify);\n }\n}\n\n\nvoid MasterServer::OnEventHarborOffline(void* ud, uint32_t signal, \n const VariantList& args) {\n auto self = (MasterServer*)ud;\n EventHarborOffline e_harbor_offline;\n int ret = e_harbor_offline.Deserialize(args);\n assert(!ret);\n auto iter = self->slaves_.find(e_harbor_offline.node_id);\n assert(iter != self->slaves_.end());\n self->slaves_.erase(iter);\n const std::string node_name = GetNodeName(e_harbor_offline.node_id);\n Trace(TraceLevel::Info, \"harbor node offline %s\", node_name.c_str());\n}\n\n\nvoid MasterServer::Broadcast(const HarborCommand& message) {\n for (auto& iter : slaves_) {\n SendHarborCommand(iter.first, message);\n }\n}\n\n\nvoid 
MasterServer::Broadcast(const HarborCommand& message,\n const std::function<bool(const SlaveNode&)>& pred) {\n for (auto& iter : slaves_) {\n if (pred(*iter.second)) {\n SendHarborCommand(iter.first, message);\n }\n }\n}\n\n\nvoid MasterServer::BroadcastByNodeType(const HarborCommand& message, \n NodeType type) {\n Broadcast(message, [type](const SlaveNode& node) -> bool {\n auto cap = GetNodeCap(node.GetNodeId());\n return cap->node_type == type;\n });\n}\n\n\nbool MasterServer::IsClustersStartUp() const {\n auto harbor = GetHarbor();\n std::vector<node_id_t> slaves;\n GetSlaveNodeIds(slaves);\n return slaves.size() == harbor->GetNeighbourNodeCount();\n}\n\n\nSlaveNode* MasterServer::GetSlaveNode(node_id_t node_id) const {\n for (auto& iter : slaves_) {\n if (iter.second->GetNodeId() == node_id) {\n return iter.second.get();\n }\n }\n return nullptr;\n}\n\n\nvoid MasterServer::OnCommandGateReady(void* ud,\n uint32_t signal,\n const VariantList& args) {\n node_id_t source = INVALID_NODE_ID;\n GateReady notify;\n int ret = ExtractHarborCommand(args, source, notify);\n assert(!ret);\n auto self = (MasterServer*)ud;\n auto node = self->GetSlaveNode(source);\n assert(GetNodeCap(node->GetNodeId())->node_type == NodeType::Gate);\n node->set_ready_open_gate(true);\n assert(self->IsClustersStartUp());\n for (auto& iter : self->slaves_) {\n auto& node = iter.second;\n if (node->GetNodeType() == NodeType::Gate && !node->ready_open_gate()) {\n return;\n }\n }\n Trace(TraceLevel::Info, \"All ready, now notify the gate open\");\n OpenGate req;\n self->BroadcastByNodeType(req, NodeType::Gate);\n}\n\n\nvoid MasterServer::OnCommandOpenGateResponse(void* ud,\n uint32_t signal,\n const VariantList& args) {\n node_id_t source = INVALID_NODE_ID;\n OpenGateResponse response;\n int ret = ExtractHarborCommand(args, source, response);\n assert(!ret);\n Trace(TraceLevel::Info, \"gate open result = %d\", response.res);\n}\n\nNS_SPARROW_END\n"
},
{
"alpha_fraction": 0.5988567471504211,
"alphanum_fraction": 0.6170039176940918,
"avg_line_length": 26.415422439575195,
"blob_id": "d1e4e74167c83a671cec45f832a13547aeb3b5c5",
"content_id": "7e854b486fc1f6d74011fba0b22d415dae9fa8d9",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 11021,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 402,
"path": "/include/sparrow/variant.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_VARIANT_H\n#define SPARROW_VARIANT_H\n\n#include <cstdint>\n#include <list>\n#include <vector>\n#include <string>\n#include <string.h>\n#include <assert.h>\n#include <algorithm>\n\n#include \"sparrow_define.h\"\n\nNS_SPARROW_BEG\n\nenum class variant_type {\n INT8,\n INT16,\n INT32,\n INT64,\n UINT8,\n UINT16,\n UINT32,\n UINT64,\n FLOAT,\n DOUBLE,\n POINTER,\n STRING,\n BLOCK,\n UNKNOWN\n};\n\n\nstruct block_t {\n void* data;\n size_t sz;\n};\n\n\nstruct variant_t {\n variant_type type;\n union {\n int8_t i8_value;\n int16_t i16_value;\n int32_t i32_value;\n int64_t i64_value;\n uint8_t u8_value;\n uint16_t u16_value;\n uint32_t u32_value;\n uint64_t u64_value;\n float flt_value;\n double dbl_value;\n void* ptr_value;\n char* str_value;\n block_t block_value;\n };\n\n variant_t() { type = variant_type::UNKNOWN; }\n 
variant_t(int8_t value) { i8_value = value; type = variant_type::INT8; }\n variant_t(int16_t value) { i16_value = value; type = variant_type::INT16; }\n variant_t(int32_t value) { i32_value = value; type = variant_type::INT32; }\n variant_t(int64_t value) { i64_value = value; type = variant_type::INT64; }\n variant_t(uint8_t value) { u8_value = value; type = variant_type::UINT8; }\n variant_t(uint16_t value) { u16_value = value; type = variant_type::UINT16; }\n variant_t(uint32_t value) { u32_value = value; type = variant_type::UINT32; }\n variant_t(uint64_t value) { u64_value = value; type = variant_type::UINT64; }\n variant_t(float value) { flt_value = value; type = variant_type::FLOAT; }\n variant_t(double value) { dbl_value = value; type = variant_type::DOUBLE; }\n variant_t(void* value) { ptr_value = value; type = variant_type::POINTER; }\n#ifdef __GNUG__\n variant_t(unsigned long value) { u64_value = value; type = variant_type::UINT64; }\n#endif // __GNUG__\n\n variant_t(const char* value) { \n init_str(value);\n }\n\n variant_t(const void* data, size_t sz) {\n init_block(data, sz);\n }\n\n variant_t(const variant_t& rhs) {\n memcpy(this, &rhs, sizeof(*this));\n if (variant_type::STRING == type) {\n init_str(rhs.str_value);\n } else if (variant_type::BLOCK == type) {\n init_block(rhs.block_value.data, rhs.block_value.sz);\n }\n }\n\n variant_t(variant_t&& rhs) {\n this->type = rhs.type;\n this->block_value = rhs.block_value;\n rhs.type = variant_type::UNKNOWN;\n }\n\n ~variant_t() {\n Clear();\n }\n\n variant_t& operator=(const variant_t& rhs) {\n if (this != &rhs) {\n Clear();\n memcpy(this, &rhs, sizeof(*this));\n if (variant_type::STRING == type) {\n init_str(rhs.str_value);\n } else if (variant_type::BLOCK == type) {\n init_block(rhs.block_value.data, rhs.block_value.sz);\n }\n }\n return *this;\n }\n\n variant_t& operator=(variant_t&& rhs) {\n Clear();\n this->type = rhs.type;\n this->block_value = rhs.block_value;\n rhs.type = variant_type::UNKNOWN;\n 
return *this;\n }\n\n operator int8_t() const { assert(variant_type::INT8 == type); return i8_value; }\n operator int16_t() const { assert(variant_type::INT16 == type); return i16_value; }\n operator int32_t() const { assert(variant_type::INT32 == type); return i32_value; }\n operator int64_t() const { assert(variant_type::INT64 == type); return i64_value; }\n operator uint8_t() const { assert(variant_type::UINT8 == type); return u8_value; }\n operator uint16_t() const { assert(variant_type::UINT16 == type); return u16_value; }\n operator uint32_t() const { assert(variant_type::UINT32 == type); return u32_value; }\n operator uint64_t() const { assert(variant_type::UINT64 == type); return u64_value; }\n operator float() const { assert(variant_type::FLOAT == type); return flt_value; }\n operator double() const { assert(variant_type::DOUBLE == type); return dbl_value; }\n operator const char*() const { assert(variant_type::STRING == type); return str_value; }\n operator void*() const { assert(variant_type::POINTER == type); return ptr_value; }\n operator block_t() const { assert(variant_type::BLOCK == type); return block_value; }\n\n template<typename T>\n operator T*() const { assert(variant_type::POINTER == type); return (T*)ptr_value; }\n\n#ifdef __GNUG__\n operator unsigned long() const { assert(variant_type::UINT64 == type); return u64_value; }\n#endif // __GNUG__\n\n\n size_t GetByteSize() const {\n size_t sz = 1; // used for save type info\n switch (type) {\n case variant_type::INT8:\n case variant_type::UINT8:\n sz += 1;\n break;\n case variant_type::INT16:\n case variant_type::UINT16:\n sz += 2;\n break;\n case variant_type::INT32:\n case variant_type::UINT32:\n case variant_type::FLOAT:\n sz += 4;\n break;\n case variant_type::INT64:\n case variant_type::UINT64:\n case variant_type::DOUBLE:\n sz += 8;\n break;\n case variant_type::POINTER:\n sz += sizeof(void*);\n break;\n case variant_type::STRING:\n sz += sizeof(uint16_t) + strlen(str_value);\n break;\n 
case variant_type::BLOCK:\n sz += sizeof(uint16_t) + block_value.sz;\n break;\n case variant_type::UNKNOWN:\n assert(0);\n break;\n }\n return sz;\n }\n\n int Serialize(void* data) const {\n const size_t osz = GetByteSize();\n *(uint8_t*)data = (uint8_t)type;\n if (variant_type::STRING == type) {\n size_t str_sz = strlen(str_value);\n *(uint16_t*)((char*)data + 1) = (uint16_t)str_sz;\n memcpy((char*)data + 3, str_value, str_sz);\n } else if (variant_type::BLOCK == type) {\n *(uint16_t*)((char*)data + 1) = (uint16_t)block_value.sz;\n memcpy((char*)data + 3, block_value.data, block_value.sz);\n } else {\n memcpy((char*)data + 1, &i8_value, osz - 1);\n }\n return 0;\n }\n\n int Deserialize(const void* data) {\n Clear();\n type = (variant_type)*(uint8_t*)data;\n if (variant_type::STRING == type) {\n const size_t str_sz = *(uint16_t*)((char*)data + 1);\n str_value = new char[str_sz + 1];\n memcpy(str_value, (char*)data + 3, str_sz);\n str_value[str_sz] = 0;\n } else if (variant_type::BLOCK == type) {\n block_value.sz = *(uint16_t*)((char*)data + 1);\n if (block_value.sz) {\n block_value.data = malloc(block_value.sz);\n memcpy(block_value.data, (char*)data + 3, block_value.sz);\n } else {\n block_value.data = nullptr;\n }\n } else {\n memcpy(&i8_value, (char*)data + 1, GetByteSize() - 1);\n }\n return 0;\n }\n\n void Clear() {\n if (variant_type::STRING == type) {\n delete [] str_value;\n }\n if (variant_type::BLOCK == type && block_value.data) {\n free(block_value.data);\n }\n type = variant_type::UNKNOWN;\n }\n\nprivate:\n void init_str(const char* str) {\n type = variant_type::STRING;\n size_t sz = strlen(str) + 1;\n str_value = new char[sz];\n memcpy(str_value, str, sz);\n }\n\n void init_block(const void* data, size_t sz) {\n type = variant_type::BLOCK;\n block_value.sz = sz;\n if (sz) {\n block_value.data = malloc(sz);\n memcpy(block_value.data, data, sz);\n } else {\n block_value.data = nullptr;\n }\n }\n};\n\n\nclass VariantList {\npublic:\n VariantList() {}\n\n 
VariantList(const VariantList& other) {\n for (auto& iter : other.data_) {\n data_.push_back(*iter);\n }\n }\n\n VariantList& operator=(const VariantList& other) {\n if (this != &other) {\n Clear();\n for (auto& iter : other.data_) {\n data_.push_back(iter);\n }\n }\n return *this;\n }\n\n VariantList(VariantList&& other) : data_(std::move(other.data_)) {}\n\n VariantList& operator=(VariantList&& other) {\n this->data_ = std::move(other.data_);\n return *this;\n }\n\n size_t Size() const { return data_.size(); }\n\n template<typename T>\n VariantList& AddValue(const T& value) {\n data_.push_back(variant_t(value));\n return *this;\n }\n\n VariantList& AddValue(const void* data, size_t sz) {\n data_.push_back(variant_t(data, sz));\n return *this;\n }\n\n template<typename T>\n VariantList& AddValue(const std::vector<T>& value) {\n if (value.empty()) {\n data_.push_back(variant_t(nullptr, 0));\n } else {\n data_.push_back(variant_t(&value[0], value.size() * sizeof(T)));\n }\n return *this;\n }\n\n VariantList& AddValue(const std::string& value) {\n data_.push_back(variant_t(value.c_str()));\n return *this;\n }\n\n template<typename T>\n T GetValue(size_t index) const {\n auto beg = data_.begin();\n std::advance(beg, index);\n return *beg;\n }\n\n template<typename T>\n T& GetValue(size_t index, T& value) const {\n value = GetValue<T>(index);\n return value;\n }\n\n variant_t operator[](size_t index) {\n auto beg = data_.begin();\n std::advance(beg, index);\n return *beg;\n }\n\n variant_t operator[](size_t index) const {\n return const_cast<VariantList*>(this)->operator[](index);\n }\n\n void Clear() {\n data_.clear();\n }\n\n variant_type GetType(size_t index) const {\n auto beg = data_.begin();\n std::advance(beg, index);\n return beg->type;\n }\n\n template<typename T>\n VariantList& operator<<(const T& value) {\n return AddValue(value);\n }\n\n size_t GetByteSize() const {\n size_t sz = 0;\n for (auto& var : data_) {\n sz += var.GetByteSize();\n }\n return sz;\n 
}\n \n int Serialize(void* data, size_t sz) const {\n if (sz < GetByteSize()) { return -1; }\n size_t offset = 0;\n for (auto& var : data_) {\n var.Serialize((char*)data + offset);\n offset += var.GetByteSize();\n }\n return 0;\n }\n\n int Deserialize(const void* data, size_t sz) {\n data_.clear();\n size_t offset = 0;\n while (sz) {\n variant_t var;\n var.Deserialize((char*)data + offset);\n data_.push_back(var);\n const size_t piece_sz = var.GetByteSize();\n sz -= piece_sz;\n offset += piece_sz;\n }\n return 0;\n }\n\nprivate:\n std::list<variant_t> data_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_VARIANT_H\n"
},
{
"alpha_fraction": 0.6533392071723938,
"alphanum_fraction": 0.6561951041221619,
"avg_line_length": 32.47058868408203,
"blob_id": "ebb58a4daea35727c2ff880583fcad50aca4248a",
"content_id": "b3d038aab4b9d95175fd638b436681fa6c0b9e87",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4554,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 136,
"path": "/test_modules/chat_module/chat_rpc_logic.cpp",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#include <WinSock2.h>\n#include <sparrow/i_kernel.h>\n#include \"chat_rpc_logic.h\"\n#include \"chat_proto.h\"\n\nint ChatRpcLogic::OnRpcRequest(IKernel* kernel,\n uint32_t request,\n const goku::VariantList& args,\n goku::VariantList& result) {\n kernel_ = kernel;\n switch (request) {\n case RPC_CHAT_ADDPLAYER:\n HandleChatRpcAddPlayer(args, result);\n break;\n case RPC_CHAT_RMPLAYER:\n HandleChatRpcRemovePlayer(args, result);\n break;\n case RPC_CHAT_SETNICKNAME:\n HandleChatRpcSetNickName(args, result);\n break;\n case RPC_CHAT_WORLDMESSAGE:\n HandleChatRpcWorldMessage(args, result);\n break;\n case RPC_CHAT_WHISPERMESSAGE:\n HandleChatRpcWhisperMessage(args, result);\n break;\n default:\n return -1;\n }\n return 0;\n}\n\n\nvoid ChatRpcLogic::Send(IKernel* kernel, \n client_uuid_t cuuid, \n const std::string& 
message) {\n ChatMessage chat_message;\n chat_message.message = message;\n VariantList var;\n int ret = chat_message.Serialize(var);\n assert(!ret);\n const size_t sz = var.GetByteSize();\n assert(sz);\n std::unique_ptr<char[]> data(new char[sz]);\n ret = var.Serialize(data.get(), sz);\n assert(!ret);\n kernel->Send(cuuid, data.get(), sz);\n}\n\n\nvoid ChatRpcLogic::HandleChatRpcAddPlayer(const VariantList& args,\n VariantList& result) {\n ChatRpcAddPlayer cmd;\n int ret = cmd.Deserialize(args);\n assert(!ret);\n auto iter = all_users_.find(cmd.cuuid);\n assert(iter == all_users_.end());\n all_users_.insert(cmd.cuuid);\n}\n\n\nvoid ChatRpcLogic::HandleChatRpcRemovePlayer(const VariantList& args,\n VariantList& result) {\n ChatRpcRemovePlayer cmd;\n int ret = cmd.Deserialize(args);\n assert(!ret);\n auto iter = all_users_.find(cmd.cuuid);\n assert(iter != all_users_.end());\n all_users_.erase(iter);\n}\n\n\nvoid ChatRpcLogic::HandleChatRpcSetNickName(const VariantList& args,\n VariantList& result) {\n ChatRpcSetNickName cmd;\n int ret = cmd.Deserialize(args);\n assert(!ret);\n ChatRpcSetNickNameResponse response;\n response.cuuid = cmd.cuuid;\n auto iter = name_to_uuid_.find(cmd.nick_name);\n if (iter == name_to_uuid_.end()) {\n name_to_uuid_.insert(std::make_pair(cmd.nick_name, cmd.cuuid));\n response.nick_name = cmd.nick_name;\n response.succeed = 1;\n } else {\n response.succeed = 0;\n }\n response.Serialize(result);\n}\n\n\nvoid ChatRpcLogic::HandleChatRpcWorldMessage(const VariantList& args,\n VariantList& result) {\n ChatRpcWorldMessage cmd;\n int ret = cmd.Deserialize(args);\n assert(!ret);\n for (auto& iter : all_users_) {\n char message[512];\n sprintf(message, \"[WORLD] [%s] say: %s\", cmd.sender.c_str(), cmd.content.c_str());\n Send(kernel_, iter, message);\n }\n}\n\n\nvoid ChatRpcLogic::HandleChatRpcWhisperMessage(const VariantList& args,\n VariantList& result) {\n ChatRpcWhisperMessage cmd;\n int ret = cmd.Deserialize(args);\n assert(!ret);\n auto iter 
= name_to_uuid_.find(cmd.target_name);\n if (iter == name_to_uuid_.end()) { return; }\n char message[512];\n sprintf(message, \"[WHIP] [%s] say to you: %s\", cmd.target_name.c_str(), cmd.content.c_str());\n Send(kernel_, iter->second, message);\n}\n"
},
{
"alpha_fraction": 0.702650785446167,
"alphanum_fraction": 0.702650785446167,
"avg_line_length": 26.989246368408203,
"blob_id": "af6133d8f03a8545d653f2726467ccbacb61bf49",
"content_id": "a4903cd8bd2a43f654c0d266f8876d93be81fcb0",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2603,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 93,
"path": "/include/sparrow/tcp_connection.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n*\n* Permission is hereby granted, free of charge, to any person obtaining a copy\n* of this software and associated documentation files (the \"Software\"), to\n* deal in the Software without restriction, including without limitation the\n* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n* sell copies of the Software, and to permit persons to whom the Software is\n* furnished to do so, subject to the following conditions:\n*\n* The above copyright notice and this permission notice shall be included in\n* all copies or substantial portions of the Software.\n*\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n* IN THE SOFTWARE.\n*/\n\n#ifndef SPARROW_TCP_CONNECTION_H\n#define SPARROW_TCP_CONNECTION_H\n\n#include <queue>\n#include <string.h>\n#include <uv.h>\n\n#include \"entity.h\"\n#include \"sparrow_define.h\"\n#include \"i_entity_creator.h\"\n#include \"i_serialize.h\"\n#include \"sparrow_net.h\"\n\nNS_SPARROW_BEG\n\nclass TcpConnection : public Entity {\npublic:\n DECL_ENTITY_CREATOR(TcpConnection)\n\n TcpConnection();\n ~TcpConnection();\n\n void SetReadCallback(net::read_cb_t cb, void* ud) {\n read_cb_ = cb;\n read_cb_ud_ = ud;\n }\n\n void SetCloseCallback(net::close_cb_t cb, void* ud) {\n close_cb_ = cb;\n close_cb_ud_ = ud;\n }\n\n int SendPacket(const void* data, size_t sz);\n int Disconnect();\n\nprotected:\n bool OnInit(const VariantList& args) override;\n void OnUninit() override;\n\nprivate:\n struct data_blob_t {\n data_blob_t(const void* data, 
size_t sz) {\n this->data = malloc(sz);\n memcpy(this->data, data, sz);\n this->sz = sz;\n }\n ~data_blob_t() {\n free(data);\n }\n void* data;\n size_t sz;\n };\n\nprivate:\n static void OnAlloc(uv_handle_t*, size_t, uv_buf_t*);\n static void OnRead(uv_stream_t*, ssize_t, const uv_buf_t*);\n static void OnWritten(uv_write_t* req, int status);\n static void OnClosed(uv_handle_t* handle);\n\nprivate:\n uv_tcp_t* handle_;\n net::read_cb_t read_cb_;\n void* read_cb_ud_;\n net::close_cb_t close_cb_;\n void* close_cb_ud_;\n connection_id_t connid_;\n std::queue<data_blob_t*> send_queue_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_TCP_CONNECTION_H\n"
},
{
"alpha_fraction": 0.7100290656089783,
"alphanum_fraction": 0.7136628031730652,
"avg_line_length": 32.56097412109375,
"blob_id": "dc5e258a2632c738c6599e390ce06c379719e868",
"content_id": "6073dd9ff34844c7f14fb50089bc53a2fcae51bd",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2754,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 82,
"path": "/include/sparrow/backend_server.h",
"repo_name": "gokulab/sparrow",
"src_encoding": "UTF-8",
"text": "/* Copyright http://www.gokulab.com. All rights reserved.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\n#ifndef SPARROW_LOGIC_SERVER_H\n#define SPARROW_LOGIC_SERVER_H\n\n#include <memory>\n#include <string>\n#include <map>\n\n#include \"sparrow_define.h\"\n#include \"entity.h\"\n#include \"i_entity_creator.h\"\n#include \"connector.h\"\n#include \"proto/data_codec.h\"\n#include \"master_client.h\"\n#include \"game_player.h\"\n\nNS_SPARROW_BEG\n\nclass TcpServer;\nclass GateNode;\nclass BackendLogicModule;\n\ntypedef void(*rpc_cb_t)(uint32_t func,\n int result,\n const VariantList& value,\n void* ud);\n\nclass BackendServer : public MasterClient {\npublic:\n DECL_ENTITY_CREATOR(BackendServer)\n\n BackendServer();\n ~BackendServer();\n\n int ScheduleRpc(node_id_t node_id,\n uint32_t func,\n const VariantList& args,\n rpc_cb_t cb,\n void* ud);\n\nprotected:\n bool OnInit(const VariantList& args) override;\n void 
OnUninit() override;\n\nprivate:\n static void OnHarborCommand(void* ud, uint32_t command, const VariantList& args);\n void HandleClientOnline(node_id_t source, const VariantList& args);\n void HandleClientOffline(node_id_t source, const VariantList& args);\n void HandleClientMessage(node_id_t source, const VariantList& args);\n void HandleRpcCall(node_id_t source, const VariantList& args);\n void HandleRpcRet(node_id_t source, const VariantList& args);\n\nprivate:\n BackendLogicModule* backend_logic_module_;\n std::map<client_uuid_t, std::unique_ptr<GamePlayer> > players_;\n uint32_t cookie_;\n std::map<uint32_t, std::tuple<rpc_cb_t, void*> > rpc_cbs_;\n};\n\nNS_SPARROW_END\n\n#endif // SPARROW_GATE_SERVER_H\n"
}
] | 79 |
Aligorith/physlabtools | https://github.com/Aligorith/physlabtools | c8e4cde6723acf9ed5dde2e7fe75389766b3f241 | 77ed0bfd88f7305594b23205f10b59696394fb7c | 775bd4dfdbcc9e797de616971e80f35fdfe15299 | refs/heads/master | 2021-01-23T16:26:22.034145 | 2016-02-07T11:50:32 | 2016-02-07T11:50:32 | 32,112,094 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6457249522209167,
"alphanum_fraction": 0.6487727761268616,
"avg_line_length": 30.5933780670166,
"blob_id": "b92a4a97953f334be895356b822fce955154c56e",
"content_id": "d2e2fc816a96f779b3d79bcdb58d6a85523cb28a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 24608,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 755,
"path": "/src/phystools.py",
"repo_name": "Aligorith/physlabtools",
"src_encoding": "UTF-8",
"text": "# Python utilities to perform the calculations for Physics Labs\r\n# Copyright 2009, Joshua Leung ([email protected])\r\n#\r\n# First edition: 2009 April 27\r\n\r\nimport decimal\r\nfrom decimal import Decimal as D\r\nimport decimaltools as DTools\r\nimport math\r\n\r\n###################################\r\n# Units - Single\r\n\r\n# Abstract single 'unit' class\r\nclass Unit:\r\n\t# Instance Information - FOR CUSTOMISATION BY SUBCLASSES\r\n\tunit_symbol = \"\";\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Units\";\t# name of unit for help info\r\n\tbase = 10;\t\t\t\t# size of steps for this type of unit\r\n\tpower = 1;\t\t\t\t# how unit compares to other ones (relative to base)\r\n\tis_SI = False;\t\t\t# unit is SI unit\r\n\t\r\n\t# Return string representation for console use\r\n\tdef __repr__ (self):\r\n\t\treturn self.unit_name;\r\n\t\t\r\n\t# Return string representation for user output\r\n\tdef __str__ (self):\r\n\t\treturn self.unit_symbol;\r\n\t\t\r\n\t# check if the provided unit is the same type of measurement as us\r\n\tdef sameMeasurement (self, other):\r\n\t\t# sanity check\r\n\t\tif isinstance(other, Unit) == False:\r\n\t\t\traise TypeError, \"Not a unit!\";\r\n\t\t\r\n\t\t# if base step is not equal, and/or units are of not same type of measurement\r\n\t\t# no conversion needed, so conversion factor is simply 1\r\n\t\tif isinstance(self, MassUnit):\r\n\t\t\treturn isinstance(other, MassUnit);\r\n\t\telif isinstance(self, LengthUnit):\r\n\t\t\treturn isinstance(other, LengthUnit);\r\n\t\telif isinstance(self, TimeUnit):\r\n\t\t\treturn isinstance(other, TimeUnit);\r\n\t\t\t\r\n\t\t# not the same type\r\n\t\treturn False;\r\n\t\t\r\n\t# Get conversion factor, given another unit, to convert from that unit to us\r\n\t# XXX we may have reversed the order of the naming vs usage :/\r\n\tdef conversionFactor (self, other):\r\n\t\t# check if same type of measurement\r\n\t\tif self.sameMeasurement(other) == False:\r\n\t\t\treturn 
D(1);\r\n\t\t\r\n\t\t# assume valid types that need converting now (base units should be the same)\r\n\t\tif self.power != other.power:\r\n\t\t\treturn D(other.base ** other.power) / D(self.base ** self.power); \r\n\t\telse:\r\n\t\t\treturn D(1); # no conversion needed...\r\n\r\n# dummy unit - placeholder which does nothing\r\nclass DummyUnit(Unit):\r\n\tpass;\r\n\r\n# -------------------\r\n\r\n# Unit for Mass\r\nclass MassUnit(Unit):\r\n\t# Type Information\r\n\tunit_name = \"Mass\";\t\t# name of unit for help info\r\n\tbase = 10;\t\t\t\t# size of steps for this type of unit\r\n\t\r\n\r\n# kilograms (SI)\r\nclass KilogramMassUnit(MassUnit):\r\n\t# Type Information\r\n\tunit_symbol = \"kg\";\t\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Mass (kg)\";\t# name of unit for help info\r\n\tpower = 3;\t\t\t\t\t# how unit compares to other ones\r\n\tis_SI = True;\t\t\t\t# unit is SI unit\r\n\t\r\n# grams\r\nclass GramMassUnit(MassUnit):\r\n\t# Type Information\r\n\tunit_symbol = \"g\";\t\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Mass (g)\";\t\t# name of unit for help info\r\n\tpower = 0;\t\t\t\t\t# how unit compares to other ones\r\n\t\r\n# -------------------\r\n\r\n# Unit for Length\r\nclass LengthUnit(Unit):\r\n\t# Type Information\r\n\tunit_name = \"Length\";\t# name of unit for help info\r\n\tbase = 10;\t\t\t\t# size of steps for this type of unit\r\n\t\r\n\r\n# kilometres\r\nclass KilometreLengthUnit(LengthUnit):\r\n\t# Type Information\r\n\tunit_symbol = \"km\";\t\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Length (km)\";\t# name of unit for help info\r\n\tpower = 7;\t\t\t\t\t# how unit compares to other ones\r\n\t\r\n# meters (SI)\r\nclass MetreLengthUnit(LengthUnit):\r\n\t# Type Information\r\n\tunit_symbol = \"m\";\t\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Length (m)\";\t# name of unit for help info\r\n\tpower = 3;\t\t\t\t\t# how unit compares to other ones\r\n\tis_SI = True;\t\t\t\t# unit is SI 
unit\r\n\t\r\n# centimetres\r\nclass CentimetreLengthUnit(LengthUnit):\r\n\t# Type Information\r\n\tunit_symbol = \"cm\";\t\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Length (cm)\";\t# name of unit for help info\r\n\tpower = 1;\t\t\t\t\t# how unit compares to other ones\r\n\t\r\n# millimetres\r\nclass MillimetreLengthUnit(LengthUnit):\r\n\t# Type Information\r\n\tunit_symbol = \"mm\";\t\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Length (mm)\";\t# name of unit for help info\r\n\tpower = 0;\t\t\t\t\t# how unit compares to other ones\r\n\t\r\n# -------------------\r\n\r\n# Unit for Time\r\nclass TimeUnit(Unit):\r\n\t# Type Information\r\n\tunit_name = \"Time\";\t\t# name of unit for help info\r\n\tbase = 60;\t\t\t\t# size of steps for this type of unit\r\n\t\r\n\r\n# seconds (SI)\r\nclass SecondTimeUnit(MassUnit):\r\n\t# Type Information\r\n\tunit_symbol = \"s\";\t\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Time (s)\";\t\t# name of unit for help info\r\n\tpower = 0;\t\t\t\t\t# how unit compares to other ones\r\n\tis_SI = True;\t\t\t\t# unit is SI unit\r\n\t\r\n# minutes\r\nclass MinuteTimeUnit(MassUnit):\r\n\t# Type Information\r\n\tunit_symbol = \"min\";\t\t# 'symbol' to print beside values\r\n\tunit_name = \"Time (min)\";\t# name of unit for help info\r\n\tpower = 1;\t\t\t\t\t# how unit compares to other ones\r\n\t\r\n###################################\r\n# Units - Combinations of them\r\n\t\r\n# Measurement Unit - represents a some form of measurement \r\n# for use when combining multiple units\r\nclass MeasurementUnit:\r\n\t# instance stuff -----------------------------------\r\n\t\r\n\t# instance vars\r\n\tunit= None;\t\t# unit used here\r\n\tpower= 1;\t\t# i.e. 
unit^power\r\n\t\r\n\tdef __init__ (self, unit, power=1):\r\n\t\t# sanity checks\r\n\t\tif isinstance(unit, Unit):\r\n\t\t\tself.unit= unit;\r\n\t\telse:\r\n\t\t\traise TypeError, \"Unit provided must be an instance of Unit\";\r\n\t\t\t\r\n\t\tif type(power) != int:\r\n\t\t\traise TypeError, \"Power must be an integer\";\r\n\t\telse:\r\n\t\t\tself.power= power;\r\n\t\t\r\n\t# Return string representation for console use\r\n\tdef __repr__ (self):\r\n\t\treturn \"MeasurementUnit(%s, %d)\" % (repr(self.unit), self.power)\r\n\t\t\r\n\t# Return string representation for user output\r\n\tdef __str__ (self):\r\n\t\t# unit^pow\r\n\t\treturn \"%s^%d\" % (self.unit, self.power)\r\n\t\t\r\n\t\r\n\t# type-specific tools -------------------------------\r\n\t\r\n\t# check if unit is combinable with measurement\r\n\tdef combinableUnit (self, unit):\r\n\t\t# firstly, the measurement type test\r\n\t\tif self.unit.sameMeasurement(unit) == False:\r\n\t\t\treturn False;\r\n\t\t\t\r\n\t\t# now the specific unit type test\r\n\t\tif isinstance(unit, self.__class__) == False:\r\n\t\t\t# for now, not acceptable - will form result in another measurement in vars\r\n\t\t\treturn False;\r\n\t\t\r\n\t\t# unit is ok\r\n\t\treturn True;\r\n\t\r\n\t# 'add' power reference\r\n\tdef add (self, other):\r\n\t\tself.power += 1;\r\n\t\t\r\n\t# 'subtract' power reference\r\n\tdef subtract (self):\r\n\t\tself.power -= 1;\r\n\t\t\r\n\t# combine with another measurement\r\n\t# \t- assumes sanity checks done\r\n\tdef combine (self, other):\r\n\t\t# simply combine the powers now, since units are the same \r\n\t\tself.power += other.power;\r\n\t\r\n# ---\t\r\n\r\n# Dummy combined unit class - just store all the provided units\r\nclass CombinedUnits:\r\n\t# class stuff ------------------------------\r\n\tdef __init__ (self, initialValues=[]):\r\n\t\t# init storage\r\n\t\tself.units = [];\r\n\t\t\r\n\t\t# store provided values\r\n\t\tfor val in initialValues:\r\n\t\t\tself.addUnit(val);\r\n\t\r\n\tdef __repr__ 
(self):\r\n\t\treturn \"CombinedUnits(%s)\" % (repr(self.units));\r\n\t\t\r\n\tdef __str__ (self):\r\n\t\t# loop over measurements, getting a list of the units\r\n\t\tresult = \"\";\r\n\t\tfor unit in self.units:\r\n\t\t\t# if power is 0, don't show, since something cancelled it out\r\n\t\t\tif unit.power != 0:\r\n\t\t\t\t# TODO: should we do any fancy grouping around these?\r\n\t\t\t\tresult += str(unit);\r\n\t\t\t\r\n\t\treturn result;\r\n\t\r\n\t# tools -------------------------------------\r\n\t\r\n\t# add a unit to the list (result of multiplication) \r\n\tdef addUnit (self, unit):\r\n\t\tif isinstance(unit, Unit):\r\n\t\t\t# check if any existing measurement will take it\r\n\t\t\tfor mUnit in self.units:\r\n\t\t\t\t# combine then finish off\r\n\t\t\t\tif mUnit.combinableUnit(unit):\r\n\t\t\t\t\tmUnit.add();\r\n\t\t\t\t\tbreak;\r\n\t\t\telse:\r\n\t\t\t\t# add a new measurement, defaulting to single power\r\n\t\t\t\tself.units.append(MeasurementUnit(unit));\r\n\t\telif isinstance(unit, MeasurementUnit):\r\n\t\t\t# check if any existing measurement just needs some power adding\r\n\t\t\tfor mUnit in self.units:\r\n\t\t\t\t# combine then finish off\r\n\t\t\t\tif mUnit.combinableUnit(unit.unit):\r\n\t\t\t\t\tmUnit.combine(unit);\r\n\t\t\t\t\tbreak;\r\n\t\t\telse:\r\n\t\t\t\t# add the given unit\r\n\t\t\t\tself.units.append(unit);\r\n\t\telse:\r\n\t\t\traise TypeError, \"Not a unit\";\r\n\t\t\t\r\n\t# remove a unit (result of division)\r\n\tdef remove (self, unit):\r\n\t\tif isinstance(unit, Unit):\r\n\t\t\t# check if any existing measurement will take it\r\n\t\t\tfor mUnit in self.units:\r\n\t\t\t\t# combine then finish off\r\n\t\t\t\tif mUnit.combinableUnit(unit):\r\n\t\t\t\t\tmUnit.subtract();\r\n\t\t\t\t\tbreak;\r\n\t\t\telse:\r\n\t\t\t\t# add a new measurement, defaulting to single negative power\r\n\t\t\t\tself.units.append(MeasurementUnit(unit, -1));\r\n\t\telif isinstance(unit, MeasurementUnit):\r\n\t\t\t# check if any existing measurement just needs some 
power adding\r\n\t\t\tfor mUnit in self.units:\r\n\t\t\t\t# combine then finish off\r\n\t\t\t\tif mUnit.combinableUnit(unit.unit):\r\n\t\t\t\t\tmUnit.combine(unit);\r\n\t\t\t\t\tbreak;\r\n\t\t\telse:\r\n\t\t\t\t# add the given unit, but make sure that it is negative\r\n\t\t\t\tunit.power= -abs(unit.power);\r\n\t\t\t\tself.units.append(unit);\r\n\t\telse:\r\n\t\t\traise TypeError, \"Not a unit\";\r\n\t\r\n###################################\r\n# Physics Number\r\n\r\n# Special representation of numbers as value + absolute uncertainty + units, \r\n# as is required in Physics Calculations. \r\nclass PhysNum:\r\n\t# helper utility functions for class ---------------\r\n\r\n\t# validate numeric arguments to yield 'Decimal' objects\r\n\t@staticmethod\r\n\tdef _validateNumArg (arg):\r\n\t\t# check for decimal \r\n\t\tif type(arg) == D:\r\n\t\t\treturn arg;\r\n\t\t\r\n\t\t# convert standard 'number' types to decimal\r\n\t\tif type(arg) == int:\r\n\t\t\treturn D(arg);\r\n\t\tif type(arg) == float:\r\n\t\t\treturn D(str(arg));\r\n\t\t\t\r\n\t\t# if a string, try to convert to a decimal, \r\n\t\t# as it may be just a number in 'disguise'\r\n\t\tif type(arg) == str:\r\n\t\t\treturn D(arg);\r\n\t\t\t\r\n\t\t# non numeric types can't be used!\r\n\t\traise TypeError, \"Non-numeric type encountered!\";\r\n\t\t\r\n\t# validate the other arg given to an arithmetic operator\r\n\tdef _validateArithArg (self, arg):\r\n\t\t# action to take depends on what the type of the given data is\r\n\t\tif type(arg) in (int, float, D):\r\n\t\t\t# convert to a PhysNum to be able to add normally\r\n\t\t\targ= PhysNum(arg, 0, self.getUnits());\r\n\t\telif isinstance(arg, PhysNum) == False:\r\n\t\t\t# error.. 
cannot add\r\n\t\t\traise TypeError, \"Not a numeric type\";\r\n\t\t\t\r\n\t\t# return the arg now that we've validated it\r\n\t\treturn arg;\r\n\t\t\r\n\t# class stuff ----------------------------------\r\n\t\r\n\t# instance variables\r\n\tv= None; \t\t# Decimal - 'value'\r\n\te= None;\t\t# Decimal - 'uncertainty'\r\n\tunits= None;\t# Unit - 'units' \r\n\t\r\n\t# constructor \r\n\tdef __init__ (self, value, uncertainty=0, units=None):\r\n\t\t# store arguments as instance variables after validating them first\r\n\t\tself.v= PhysNum._validateNumArg(value);\r\n\t\tself.e= PhysNum._validateNumArg(uncertainty);\r\n\t\tself.units= units; \r\n\t\r\n\t# console representation\r\n\tdef __repr__ (self):\r\n\t\treturn \"PhysNum(%s, %s, %s)\" % (self.v, self.e, repr(self.units));\r\n\t\t\r\n\t# user-output representation (standard representation)\r\n\tdef __str__ (self):\r\n\t\t# get units first - they may not exist (some cases not coded yet!)\r\n\t\tunits= self.units if self.units else \"\";\r\n\t\treturn \"%s%s +/- %s%s\" % (self.v, units, self.e, units);\r\n\t\t\r\n\t# specialist user-output representation method\r\n\t#\t- latex options: 0 = off, 1 = manually defined, 2 = with special macro\r\n\t# TODO: implement the precision controls\r\n\tdef toStr (self, latex=0, withUnits=False, precision=28):\r\n\t\t# get units first - just in case they are used (but they may not exist)\r\n\t\tunits= self.units if (self.units and withUnits) else \"\";\r\n\t\t\r\n\t\t# return format\r\n\t\tif latex:\r\n\t\t\tif latex == 2:\t# special mode... 
TODO: need define for this!\r\n\t\t\t\treturn \"\\physNum{%s}{%s}{%s}\" % (self.v, self.e, units);\r\n\t\t\telse:\r\n\t\t\t\treturn \"$(%s \\pm %s)%s$\" % (self.v, self.e, units);\r\n\t\telse:\r\n\t\t\treturn \"%s%s +/- %s%s\" % (self.v, units, self.e, units);\r\n\t\t\r\n\t# getters --------------------------------------\r\n\t\r\n\t# Get the absolute value of this 'number'\r\n\t#\twithUnits: (boolean) if True, return a outputtable string containing the absolute value and units\r\n\t#\t\t\t otherwise, just return the Decimal() that represents this\r\n\tdef getValue (self, withUnits=False):\r\n\t\tif withUnits:\r\n\t\t\treturn \"%s%s\" % (self.v, self.units);\r\n\t\telse:\r\n\t\t\treturn self.v;\r\n\t\t\r\n\t# Get the absolute uncertainty of this 'number'\r\n\t#\twithUnits: (boolean) if True, return a outputtable string containing the absolute uncertainty and units\r\n\t#\t\t\t otherwise, just return the Decimal() that represents this\r\n\tdef getUncertainty_Absolute (self, withUnits=False):\r\n\t\tif withUnits:\r\n\t\t\treturn \"%s%s\" % (self.e, self.units);\r\n\t\telse:\r\n\t\t\treturn self.e;\r\n\t\t\r\n\t# Get the fractional uncertainty of this 'number' as a Decimal()\r\n\tdef getUncertainty_Fractional (self):\r\n\t\t# sanity check: if our value is 0, simply return zero instead of getting divide by zero\r\n\t\tif self.v == 0:\r\n\t\t\t# for safety, just return 0\r\n\t\t\treturn D('0');\r\n\t\telse:\r\n\t\t\t# plus operator here forces rounding...\r\n\t\t\treturn +(self.e / self.v);\r\n\t\r\n\t# Get the percentage uncertainty of this 'number'\r\n\t#\twithUnits: (boolean) if True, return a outputtable string containing the percentage uncertainty and 'units'\r\n\t#\t\t\t (i.e. percent, %). 
Otherwise, just return the Decimal() that represents this\r\n\tdef getUncertainty_Percentage (self, withUnits=False):\r\n\t\tif withUnits:\r\n\t\t\treturn \"%s%%\" % (self.getUncertainty_Fractional() * 100)\r\n\t\telse:\r\n\t\t\treturn self.getUncertainty_Fractional() * 100;\r\n\t\t\r\n\t# Get the units of this 'number' as a Unit\r\n\tdef getUnits (self):\r\n\t\treturn self.units;\r\n\t\r\n\t# assorted number ops ---------------------------\r\n\t\r\n\t# change the units of this number to the specified units\r\n\tdef changeUnits (self, newUnits):\r\n\t\t# check if we need to do anything (i.e. not same units?)\r\n\t\t# \tcurrently, we just check this by using the representations, which we assume will be different (no typos!)\r\n\t\townUnits = self.getUnits();\r\n\t\tif not (ownUnits or newUnits):\r\n\t\t\treturn;\r\n\t\tif ownUnits.sameMeasurement(newUnits) == False:\r\n\t\t\treturn;\r\n\t\tif repr(ownUnits) == repr(newUnits):\r\n\t\t\treturn;\r\n\t\t\t\r\n\t\t# get the conversion factor (going from own to new, so use new.conversion... )\r\n\t\tconvFac = newUnits.conversionFactor(ownUnits);\r\n\t\t\r\n\t\t# apply the conversion to our own values\r\n\t\tself.v *= convFac;\r\n\t\tself.e *= convFac;\r\n\t\t\r\n\t\t# set the new units\r\n\t\tself.units= newUnits;\r\n\t\r\n\t# make a copy of this number with the units changed to the specified ones\r\n\tdef convertUnits (self, newUnits):\r\n\t\t# check if we need to do anything (i.e. not same units?)\r\n\t\t# \tcurrently, we just check this by using the representations, which we assume will be different (no typos!)\r\n\t\townUnits = self.getUnits();\r\n\t\tif not (ownUnits or newUnits):\r\n\t\t\treturn None;\r\n\t\tif repr(ownUnits) == repr(newUnits):\r\n\t\t\treturn None;\r\n\t\t\t\r\n\t\t# get the conversion factor (going from own to new, so use new.conversion... 
)\r\n\t\tconvFac = newUnits.conversionFactor(ownUnits);\r\n\t\t\r\n\t\t# apply the conversion to our own values\r\n\t\tval= self.v * convFac;\r\n\t\terr= self.e * convFac;\r\n\t\t\r\n\t\t# return the new type\r\n\t\treturn PhysNum(val, err, newUnits);\r\n\t\r\n\t# unary arithmetic operators -------------------------\r\n\t\r\n\t# absolute value operator - same as doing getValue(), so just reference that\r\n\t__abs__ = getValue;\r\n\t\r\n\t# pos operator - this is overloaded to return the upper value allowed \r\n\t# \tby the uncertainty as a Decimal()\r\n\tdef __pos__ (self):\r\n\t\treturn self.getValue() + self.getUncertainty_Absolute();\r\n\t\r\n\t# pos operator - this is overloaded to return the lower value allowed \r\n\t# \tby the uncertainty as a Decimal()\r\n\tdef __neg__ (self):\r\n\t\treturn self.getValue() - self.getUncertainty_Absolute();\r\n\t\t\r\n\t# invert the values (i.e. 1/val) - special case of division, with top numbe == 1\r\n\tdef __invert__ (self):\r\n\t\t# final units are combination of these units\r\n\t\tunits= self.getUnits();\r\n\t\tnewUnits = None; # FIXME!!!!\r\n\t\t\r\n\t\t# simply divide the absolute value\r\n\t\tif self.getValue() == 0:\r\n\t\t\t# for now, refuse to divide this\r\n\t\t\t#val= Decimal('0');\r\n\t\t\traise ZeroDivisionError;\r\n\t\telse:\r\n\t\t\tval= Decimal('1') / self.getValue();\r\n\t\t\r\n\t\t# the new uncertainty is simply the sum of the fractional uncertainties of the top and bottom,\r\n\t\t# multiplied by the new value. 
This simplifies down to being simply the uncertainty * new value\r\n\t\terr= self.getUncertainty_Fractional() * val;\r\n\t\t\r\n\t\t# return a new number\r\n\t\treturn PhysNum(val, err, newUnits);\r\n\t\t\r\n\t# arithmetic operators (LHS-default) ---------------------------\r\n\t\r\n\t# addition operator - returns the result as a new PhysNum\r\n\tdef __add__ (self, other):\r\n\t\t# validate given arg\r\n\t\tother= self._validateArithArg(other);\r\n\t\t\r\n\t\t# change the units of the number we're adding so that they're compatible \r\n\t\t# (if same type of measurement, that is)\r\n\t\tunits= self.getUnits();\r\n\t\tother.changeUnits(units);\r\n\t\t\r\n\t\t# simply add the component parts, doing unit conversions on the alternate data\r\n\t\tval= self.getValue() + other.getValue();\r\n\t\terr= self.getUncertainty_Absolute() + other.getUncertainty_Absolute();\r\n\t\t\r\n\t\t# return the result\r\n\t\treturn PhysNum(val, err, units);\r\n\t\t\r\n\t# subtraction operator - returns the result as a new PhysNum\r\n\tdef __sub__ (self, other):\r\n\t\t# validate given arg\r\n\t\tother= self._validateArithArg(other);\r\n\t\t\r\n\t\t# change the units of the number we're subtracting so that they're compatible \r\n\t\t# (if same type of measurement, that is)\r\n\t\tunits= self.getUnits();\r\n\t\tother.changeUnits(units);\r\n\t\t\r\n\t\t# subtract the values, but always add the absolute uncertainties\r\n\t\t# doing unit conversions on the alternate data\r\n\t\tval= self.getValue() - other.getValue();\r\n\t\terr= self.getUncertainty_Absolute() + other.getUncertainty_Absolute();\r\n\t\t\r\n\t\t# return the result\r\n\t\treturn PhysNum(val, err, units);\r\n\t\r\n\t# multiplication operator - return the result as a new PhysNum\r\n\tdef __mul__ (self, other):\r\n\t\t# validate given arg\r\n\t\tother= self._validateArithArg(other);\r\n\t\t\r\n\t\t# final units are combination of these units\r\n\t\tunits= self.getUnits();\r\n\t\tnewUnits = units; # FIXME!!!!\r\n\t\t\r\n\t\t# change the 
units of the number we're multiplying with so that they're compatible \r\n\t\t# (if same type of measurement, that is)\r\n\t\tother.changeUnits(units);\r\n\t\t\r\n\t\t# simply multiply the absolute value\r\n\t\tval= self.getValue() * other.getValue();\r\n\t\t\r\n\t\t# to obtain the absolute uncertainty, need to add the percentage/fractional ones, \r\n\t\t# then multiply this by the new value to get the new absolute value\r\n\t\terr= (self.getUncertainty_Fractional() + other.getUncertainty_Fractional()) * val;\r\n\t\t\r\n\t\t# return a new number\r\n\t\treturn PhysNum(val, err, newUnits);\r\n\t\t\r\n\t# power operator - basically same as multiplication, but we can't do fractional values easily...\r\n\tdef __pow__ (self, other, modulo=0):\r\n\t\t# check if other is integer \r\n\t\t# FIXME: at some point, it would be good to have this\r\n\t\tif type(other) is not int:\r\n\t\t\traise NotImplemented, \"Only integer powers allowed\" \r\n\t\t\t\r\n\t\t# check if no multiplication needed?\r\n\t\tif other == 0:\r\n\t\t\t# N^0 is always 1\r\n\t\t\treturn PhysNum(1, 0, self.getUnits());\r\n\t\telif other < 0:\r\n\t\t\t# we will need to perform a division step at end, but firstly, \r\n\t\t\t# take the absolute value of the int to use\r\n\t\t\tother= abs(other);\r\n\t\t\tpostDiv= True;\r\n\t\telse:\r\n\t\t\t# just multiply...\r\n\t\t\tpostDiv= False;\r\n\t\t\t\r\n\t\t# perform a little loop, multiplying ourself by our original value multiple times\r\n\t\t# TODO: could optimise to only do even powers...\r\n\t\tresult= None;\r\n\t\tfor i in xrange(other):\t\r\n\t\t\tif result:\r\n\t\t\t\t# keep multipling if first one has been set already\r\n\t\t\t\tresult *= self;\r\n\t\t\telif other == 1:\r\n\t\t\t\t# there's just one, so just make a copy of self\r\n\t\t\t\tresult= eval(repr(self));\r\n\t\t\telse:\r\n\t\t\t\t# since this is just a starting point, this is fine...\r\n\t\t\t\tresult = self;\r\n\t\t\r\n\t\t# if we need to invert the result\r\n\t\tif postDiv: \r\n\t\t\tresult = 
~result;\r\n\t\t\t\r\n\t\t# return the resulting new number\r\n\t\treturn result;\r\n\t\r\n\t# division operator - divide values, but add fractional uncertainties\r\n\tdef __div__ (self, other):\r\n\t\t# validate given arg\r\n\t\tother= self._validateArithArg(other);\r\n\t\t\r\n\t\t# final units are combination of these units\r\n\t\tunits= self.getUnits();\r\n\t\tnewUnits = units; # FIXME!!!!\r\n\t\t\r\n\t\t# change the units of the number we're multiplying with so that they're compatible \r\n\t\t# (if same type of measurement, that is)\r\n\t\tother.changeUnits(units);\r\n\t\t\r\n\t\t# simply divide the absolute value\r\n\t\tif other.getValue() == 0:\r\n\t\t\t# for now, refuse to divide this\r\n\t\t\t#val= Decimal('0');\r\n\t\t\traise ZeroDivisionError\r\n\t\telse:\r\n\t\t\tval= self.getValue() / other.getValue();\r\n\t\t\r\n\t\t# to obtain the absolute uncertainty, need to add the percentage/fractional ones, \r\n\t\t# then multiply this by the new value to get the new absolute value\r\n\t\terr= (self.getUncertainty_Fractional() + other.getUncertainty_Fractional()) * val;\r\n\t\t\r\n\t\t# return a new number\r\n\t\treturn PhysNum(val, err, newUnits);\r\n\t\t\r\n\t# truediv is the same as div for now\r\n\t__truediv__ = __div__;\r\n\t\r\n\t\r\n\t# arithmetic operators (RHS) ---------------------------\r\n\t\r\n\t# addition operator - returns the result as a new PhysNum\r\n\t# \tsame as LHS addition, since communicative result\r\n\t__radd__ = __add__;\r\n\t\r\n\t# multiplication operator - return the result as a new PhysNum\r\n\t# \tsame as LHS multiplication, since communicative result\r\n\t__rmul__ = __mul__;\r\n\t\r\n\t# subtraction operator - returns the result as a new PhysNum\r\n\t#\torder is different, since subtraction is not communicative\r\n\tdef __rsub__ (self, other):\r\n\t\t# validate given arg\r\n\t\tother= self._validateArithArg(other);\r\n\t\t\r\n\t\t# change the units of the number we're subtracting so that they're compatible \r\n\t\t# (if same type 
of measurement, that is)\r\n\t\tunits= self.getUnits();\r\n\t\tother.changeUnits(units);\r\n\t\t\r\n\t\t# subtract the values, but always add the absolute uncertainties\r\n\t\t# doing unit conversions on the alternate data\r\n\t\tval= other.getValue() - self.getValue();\r\n\t\terr= self.getUncertainty_Absolute() + other.getUncertainty_Absolute();\r\n\t\t\r\n\t\t# return the result\r\n\t\treturn PhysNum(val, err, units);\r\n\t\t\r\n\t# division operator - divide values, but add fractional uncertainties\r\n\t#\torder is different, since subtraction is not totally communicative\r\n\tdef __rdiv__ (self, other):\r\n\t\t# validate given arg\r\n\t\tother= self._validateArithArg(other);\r\n\t\t\r\n\t\t# final units are combination of these units\r\n\t\tunits= self.getUnits();\r\n\t\tnewUnits = units; # FIXME!!!!\r\n\t\t\r\n\t\t# change the units of the number we're multiplying with so that they're compatible \r\n\t\t# (if same type of measurement, that is)\r\n\t\tother.changeUnits(units);\r\n\t\t\r\n\t\t# simply divide the absolute value\r\n\t\tif self.getValue() == 0:\r\n\t\t\t# for now, refuse to divide this\r\n\t\t\t#val= Decimal('0');\r\n\t\t\traise ZeroDivisionError\r\n\t\telse:\r\n\t\t\tval= other.getValue() / self.getValue();\r\n\t\t\r\n\t\t# to obtain the absolute uncertainty, need to add the percentage/fractional ones, \r\n\t\t# then multiply this by the new value to get the new absolute value\r\n\t\terr= (self.getUncertainty_Fractional() + other.getUncertainty_Fractional()) * val;\r\n\t\t\r\n\t\t# return a new number\r\n\t\treturn PhysNum(val, err, newUnits);\r\n\t\t\r\n\t# brute-force math ------------------------------\r\n\t\r\n\t# apply the given function (requiring single parameter only) \r\n\t# on this number to yield a new PhysNum\r\n\t# \t- uses brute-force calculation techniques\r\n\tdef calcFunc (self, func):\r\n\t\t# the new value is simply the result of applying the function to it\r\n\t\tval= func(self.v);\r\n\t\t\r\n\t\t# the absolute uncertainty is 
half the magnitude of the difference between the\r\n\t\t# upper and lower bounds allowable by the absolute uncertainties\r\n\t\t# WARNING: binary floating point errors are introduced here, as the functions \r\n\t\t# \t\t called are still essentially binary :/\r\n\t\tubound= D(str(func(self.v + self.e)));\r\n\t\tlbound= D(str(func(self.v - self.e)));\r\n\t\terr= (ubound - lbound) / 2;\r\n\t\t\r\n\t\t# return a new number \r\n\t\t# FIXME: what about the units? I guess they're still ok?\r\n\t\treturn PhysNum(val, err, self.units); \r\n\r\n###################################\r\n# Commonly-Performed Math API\r\n# TODO: separate into own file?\r\n\r\n# calculate the sum of a given list of values\r\ndef phys_sum (values):\r\n\t# init vars used\r\n\tresult= None;\r\n\ttot= len(values);\r\n\t\r\n\t# loop over values, summing them\r\n\tfor val in values:\t\r\n\t\tif result:\r\n\t\t\tresult += val;\r\n\t\telif tot == 1:\r\n\t\t\tresult = eval(repr(val));\r\n\t\telse:\r\n\t\t\tresult = val;\r\n\t\r\n\t# return the sum of the values\r\n\treturn result;\r\n\t\r\n# calculate the average value of a list of values\r\ndef phys_average (values):\r\n\tN = len(values);\r\n\t\r\n\t# get the sum of these values\r\n\tresult= phys_sum(values);\r\n\t\r\n\t# divide the absolute value by N, but the uncertainty by square-root of N\r\n\t# ..tsk tsk... directly modifying PhysNum like this is bad...\r\n\tresult.v /= N;\r\n\tresult.e /= D(N).sqrt();\t# decimal provides its own precise sqrt func\r\n\t\r\n\t# return the result\r\n\treturn result;\r\n\t\r\n###################################\r\n# Unit Tests\r\n# ... TODO!!! ...\r\n\r\nif __name__ == '__main__':\r\n\tpass;\r\n"
},
{
"alpha_fraction": 0.5675477385520935,
"alphanum_fraction": 0.5712187886238098,
"avg_line_length": 21.482759475708008,
"blob_id": "21d7e5f571c93f0a7f16bd17d838600d14b036c0",
"content_id": "3e26f0dd2d5ba4c60a5a3aef32b267d783f8e5b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1362,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 58,
"path": "/examples/tests.py",
"repo_name": "Aligorith/physlabtools",
"src_encoding": "UTF-8",
"text": "# Tests to verify that the code works correctly\r\n\r\nfrom phystools import *\r\n\r\n#############################\r\n\r\n# unit conversion tests\r\ndef test_units ():\r\n\t# Length\r\n\tprint \"Doing length tests:\"\r\n\taLength = PhysNum(2, 1, MetreLengthUnit());\r\n\tprint \"\\tOriginal aLength: \", aLength\r\n\t\r\n\tbLength= aLength.convertUnits(MillimetreLengthUnit());\r\n\tprint \"\\tNew bLength (after mm conversion): \", bLength\r\n\t\r\n# unit comparisons test\r\ndef test_unitMatching ():\r\n\tprint \"Doing type equality checks:\"\r\n\tx = MetreLengthUnit;\r\n\ty = MillimetreLengthUnit;\r\n\t\r\n\tprint \"- Method 1\"\r\n\ta = MetreLengthUnit();\r\n\tb = MetreLengthUnit();\r\n\tc = MillimetreLengthUnit();\r\n\t\r\n\tprint \"\\t Type A, B, C: \", type(a), type(b), type(c) \r\n\tprint \"\\t A == B\", type(a) == type(b)\r\n\tprint \"\\t A == C\", type(a) == type(c)\r\n\t\r\n\t\r\n\tprint \"- Method 2\"\r\n\ta = x();\r\n\tb = x();\r\n\tc = y();\r\n\t\r\n\tprint \"\\t Type A, B, C: \", type(a), type(b), type(c) \r\n\tprint \"\\t A == B\", type(a) == type(b)\r\n\tprint \"\\t A == C\", type(a) == type(c)\r\n\t\r\n\tprint \"- Method 3\"\r\n\ta = MetreLengthUnit();\r\n\tb = MetreLengthUnit();\r\n\tc = MillimetreLengthUnit();\r\n\t\r\n\tprint \"\\t A == B\", isinstance(a, b.__class__);\r\n\tprint \"\\t A == C\", isinstance(a, c.__class__);\r\n\t\r\n# no units testing\r\ndef test_NoUnits ():\r\n\tpass;\r\n\r\n#############################\r\n\r\n# uncomment the tests we want to perform...\r\n#test_units();\r\ntest_unitMatching();\r\n"
},
{
"alpha_fraction": 0.7029643058776855,
"alphanum_fraction": 0.7350271940231323,
"avg_line_length": 37.41860580444336,
"blob_id": "ceedbf7f29d5c167d2fbc4503653396721ea9aa7",
"content_id": "426d2059b94d1bb66f28a826ad1185cc35c53209",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1653,
"license_type": "no_license",
"max_line_length": 335,
"num_lines": 43,
"path": "/README.md",
"repo_name": "Aligorith/physlabtools",
"src_encoding": "UTF-8",
"text": "physlabtools\n============\nPhysics lab calculations made easier\n\n# About\n\nThis library was coded to reduce the amount of manual computation work (and mistakes made) when performing calculations for Physics labs. Namely, this library is all about handling the calculation of \"uncertainties\" (or \"standard errors\" for Biologists/everyone else) when calculating the result to data collected during a lab session.\n\nThere are two main types provided here: \n 1. A numerical type which represents a tuple of (value +/- uncertainty) + units, which can participate in standard algebraic operations \n 2. A unit type (used for 1) providing information for unit conversions\n\nThe numeric type allows the uncertainty associated with a value to be propagated and automatically calculated through the process of evaluating more complicated expressions. Meanwhile, unit conversions can also take place, whilst preserving the integrity of the data too.\n\n# Usage\n\nFirst, import the library... \n```\nfrom phystools import *\nP = PhysNum # get shorthand ref for the class\n```\n\nGet instances of the units to use...\n```\nmmDim = MillimetreLengthUnit(); \nmmDim = MetreLengthUnit();\n\ngDim = GramMassUnit();\nkgDim= KilogramMassUnit();\n\nsDim = SecondTimeUnit() \n```\n\nDefine some data (note the use of the 'P' shorthand defined earlier)... \n```\nexp_data = [\n [ P(612, 1, mmDim), P(67.72, 0.05, sDim), P(67.62, 0.05, sDim), P(67.80, 0.05, sDim)], \n [ P(481, 1, mmDim), P(60.28, 0.05, sDim), P(60.21, 0.05, sDim), P(60.28, 0.05, sDim)], \n # etc. \n]\n```\n\nPerform calculations on the data as if they were regular numbers, except that we can retrieve the uncertanties data at any point\n\n"
},
{
"alpha_fraction": 0.5681450366973877,
"alphanum_fraction": 0.5972806811332703,
"avg_line_length": 27.419048309326172,
"blob_id": "58bc19094e0f6ccff2c610fa18efaaad657de053",
"content_id": "cb3827698599380a69c863105f2dc4c01bf0b83c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9267,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 315,
"path": "/examples/lab3_sampledata_calcs.py",
"repo_name": "Aligorith/physlabtools",
"src_encoding": "UTF-8",
"text": "# Use our utilities to perform series of calculations required for data \r\n# for Lab3 Formal Report using Sample Data given\r\n\r\nimport sys\r\nimport os\r\nfrom phystools import *\r\n\r\n##################################\r\n# globals \r\n\r\n# ---\r\n# Measurement instances \r\n\r\nmmDim= MillimetreLengthUnit();\r\nmDim= MetreLengthUnit();\r\n\r\ngDim= GramMassUnit();\r\nkgDim= KilogramMassUnit();\r\n\r\nsDim= SecondTimeUnit();\r\n\r\n# ---\r\n\r\nP= PhysNum;\t# NOTE: class reference, not value reference\r\n\r\n##################################\r\n\r\n# calculate average wire radius\r\ndef calc_AverageWireRadius (values):\r\n\tglobal mDim;\r\n\t\r\n\t# convert from diameter to radius\r\n\twire_radii = [];\r\n\tfor val in values:\r\n\t\t# firstly, make sure we're in standard units\r\n\t\tval.changeUnits(mDim);\r\n\t\t\r\n\t\t# now append to list\r\n\t\twire_radii.append(val/2);\r\n\t\r\n\t# return the average\r\n\treturn phys_average(wire_radii);\r\n\t\r\n# calculate moment of inertia of sphere\r\ndef calc_SphereInertia (mass, diameter):\r\n\tglobal mDim, kgDim;\r\n\t\r\n\t# convert values to SI-units\r\n\tmass.changeUnits(kgDim);\r\n\tdiameter.changeUnits(mDim);\r\n\t\r\n\t# convert diameter to radius\r\n\tradius= diameter / 2;\r\n\t\r\n\t# use the formula:\r\n\t# \tI = 2/5 * m * r^2\r\n\treturn (mass * radius**2) * (D(2) / D(5));\r\n\t\r\n# calculate moment of inertia of heart-disk\r\ndef calc_HeartInertia(Is, T1s, T2h):\r\n\treturn T2h/T1s * Is;\r\n\r\n# calculate value of T for the given values (assumes only 3 of them)\r\ndef calc_T (t_values):\r\n\t# convert the t values (i.e. 
time for 20 oscillations) to T values\r\n\tT_values = [t/20 for t in t_values];\r\n\t\r\n\t# calculate the 'average' T value\r\n\t# TODO: in the lab specs, we should just set the uncertainty to be the\r\n\t#\t largest diff for T_values from T\r\n\tT = phys_average(T_values);\r\n\t\r\n\t# return T\r\n\treturn T;\r\n\t\r\n# calculate value of n - modulus of rigidity\r\ndef calc_n (Inertia, Slope, radius):\r\n\t# n = (8PI I) / (S r4)\r\n\treturn (Inertia * ( D(8)*D(str(math.pi)) )) / (Slope * radius**4);\r\n\r\n##################################\r\n\r\n# run program\r\n#\tstr: (string) string to print before running command\r\n#\tcmd: (string) command-line to execute\r\ndef Run_Program (str, cmd):\r\n\tif str != None: \r\n\t\tprint str\r\n\t\r\n\tstatus = os.system(cmd)\r\n\t\r\n\tif status: \r\n\t\tprint \"$$$ Error No: \", status\r\n\t\traise \"Runtime Error\"\r\n\t\r\n# ----\r\n\r\n# write latex file of the raw data\r\ndef write_latex_results (fileN, labels, format, data, caption):\r\n\t# open the nominated file for datafile writing\r\n\tf= file(fileN+\".tex\", 'w');\r\n\t\r\n\t# write prefactory stuff\r\n\tf.write(\"\\\\begin{table}[h]\\n\");\r\n\tf.write(\"\\\\begin{tabular}{ %s }\\n\" % ( \" | \".join([\"c\" for x in labels]) ));\r\n\tf.write(\"%s \\\\\\\\ \\hline \\n\" % ( \" & \".join(labels) ));\r\n\t\r\n\t# loop over data, adding the specific columns we require\r\n\tfor entry in data:\t\r\n\t\tf.write( \" & \".join(format(entry)) );\r\n\t\tif entry != data[-1]:\r\n\t\t\tf.write(\"\\\\\\\\ \\n\");\r\n\t\telse:\r\n\t\t\tf.write(\"\\n\");\r\n\t\r\n\t# write finishing stuff\r\n\tf.write(\"\\\\end{tabular}\\n\");\r\n\tif caption: f.write(\"\\\\caption{%s}\\n\" % (caption));\r\n\tf.write(\"\\\\end{table}\\n\");\r\n\t\r\n\t# close the file now\r\n\tf.close();\r\n\t\r\n# ----\r\n\t\r\n# write the given data set to the named files in standard gnuplot plotting format\r\ndef write_gnuplot_datafile (fileN, data):\r\n\t# open the nominated file for datafile 
writing\r\n\tf= file(fileN+\".dat\", 'w');\r\n\t\r\n\t# loop over data, adding the specific columns we require\r\n\tfor entry in data:\r\n\t\t# x y xdelta ydelta\r\n\t\tx= entry['L'].getValue();\r\n\t\txd= entry['L'].getUncertainty_Absolute();\r\n\t\ty= entry['T^2'].getValue();\r\n\t\tyd= entry['T^2'].getUncertainty_Absolute();\r\n\t\t\r\n\t\tf.write(\"%s %s %s %s\\n\" % (x, y, xd, yd));\r\n\t\r\n\t# close the file now\r\n\tf.close();\r\n\t\r\n\t\r\n\t# open the nominated file for plotting commands writing\r\n\tf= file(fileN+\".plt\", 'w');\r\n\t#f.write(\"set terminal latex\\n\");\r\n\tf.write(\"set output \\\"%s\\\"\\n\" % (fileN+\".tex\"));\r\n\tf.write(\"set size 3.5/5, 3/3.\\n\");\t# xxx\r\n\tf.write(\"set ylabel \\\"T^2 (s^2)\\\"\\n\");\r\n\tf.write(\"set xlabel \\\"L (m)\\\"\\n\");\r\n\tf.write(\"set grid \\n\");\r\n\tf.write(\"set title \\\"T^2 (Torsional Oscillation Period in Seconds Square) vs L (Length in m) for Solid Sphere \\\"\\n\");\r\n\t\r\n\tf.write(\"plot '%s' with xyerrorbars\\n\" % (fileN+\".dat\"));\r\n\tf.write(\"plot '%s' using $1:$2:$4 with yerrorbars\\n\" % (fileN+\".dat\"));\r\n\t\r\n\t# close the file now\r\n\tf.close();\r\n\t\r\n# request slope from user\r\n# TODO: just request the rise/run info?\r\ndef ui_GetSlope ():\r\n\tprint \"\\n\\n@ Run gnuplot on the graph file!!!\"\r\n\tprint \"Then specify the appropriate values for the best-fitslope (S) and worst-fit slope (S')\"\r\n\t\r\n\tS_dy = raw_input(\">>> Slope (S) - Rise (dy): \");\r\n\tS_dx = raw_input(\">>> Slope (S) - Run (dx): \");\r\n\tSw_dy= raw_input(\">>> Slope Worst (S') - Rise (dy): \");\r\n\tSw_dx= raw_input(\">>> Slope Worst (S') - Run (dx): \");\r\n\t\r\n\tS = D(S_dy) / D(S_dx);\r\n\tSw = D(Sw_dy) / D(Sw_dx);\r\n\tdS = abs(S - Sw); # uncertainty is the difference\r\n\t\r\n\t# no units for now\r\n\treturn P(S, dS, DummyUnit());\r\n\t\r\n# ----\r\n\r\n##################################\r\n\r\nif __name__ == '__main__':\r\n\tprint \"Phys113 Lab 3 Data Calculations 
\\n\"\r\n\t\r\n\t# set up the context to only use precision sufficient for accurate calcs\r\n\tdecimal.getcontext().prec = 10;\r\n\t\r\n\t# calculate average thickness of wire\r\n\tprint \"Calculating Wire Thickness...\"\r\n\t#\tdata\r\n\twire_diameters = [\\\r\n\t\tP(0.378, 0.0005, mmDim), \r\n\t\tP(0.378, 0.0005, mmDim),\r\n\t\tP(0.379, 0.0005, mmDim),\r\n\t\tP(0.375, 0.0005, mmDim),\r\n\t\tP(0.380, 0.0005, mmDim),\r\n\t]\r\n\t#\tcalculate\r\n\twire_average= calc_AverageWireRadius(wire_diameters);\r\n\tprint \"\\tAverage Wire Radius is \", wire_average, \"\\n\"\r\n\t#\twrite results\r\n\tdef __doformat_wdiameter (entry):\r\n\t\t# L | t1/2/3\r\n\t\tvals = [];\r\n\t\tvals.append(\"$%s$\" % (str(entry).replace(\"+/-\", \"\\\\pm\")));\r\n\t\t\r\n\t\treturn vals;\r\n\twrite_latex_results(\"wireResults\", \r\n\t\t[\"Diameter (mm)\"],\r\n\t\t __doformat_wdiameter, wire_diameters,\r\n\t\t \"Raw data for wire diameters\");\r\n\t\r\n\t\r\n\t# calculating inertia of solid sphere\r\n\tprint \"Calculating Inertia of Solid Sphere...\"\r\n\t#\tdata\r\n\tsphere_mass= P(359.9, 0.1, gDim);\r\n\tsphere_diameter= P(44.48, 0.03, mmDim);\r\n\t#\tcalculate\r\n\tsphere_inertia= calc_SphereInertia(sphere_mass, sphere_diameter);\r\n\tprint \"\\tSolid Sphere Inertia is \", sphere_inertia, \"\\n\"\r\n\t\r\n\t\r\n\t# normal data - format = length, 3 times\r\n\tprint \"Calculating Results for Solid Sphere...\"\r\n\t#\tdata\r\n\tsphere_data = [\\\r\n\t[P(612, 1, mmDim), \tP(67.72, 0.05, sDim), P(67.62, 0.05, sDim), P(67.80, 0.05, sDim)],\r\n\t[P(481, 1, mmDim), \tP(60.28, 0.05, sDim), P(60.21, 0.05, sDim), P(60.28, 0.05, sDim)],\r\n\t[P(322, 1, mmDim),\tP(50.47, 0.05, sDim), P(50.35, 0.05, sDim), P(50.56, 0.05, sDim)],\r\n\t[P(202, 1, mmDim),\tP(39.50, 0.05, sDim), P(39.32, 0.05, sDim), P(32.29, 0.05, sDim)]\r\n\t]\r\n\t\r\n\t# \tcalculate T values, and print those\r\n\tprint \"\\t L |\", \"T |\", \"T^2 \"\r\n\tT_vals = [];\r\n\tfor sdata in sphere_data:\r\n\t\t# for each set of measurements, 
store the calculated results as dict\r\n\t\t# \tlength first - convert units to SI-units\r\n\t\tT_valD = {'L':sdata[0]};\r\n\t\tT_valD['L'].changeUnits(mDim);\r\n\t\t# \tcalculate T-Values, and store as separate \r\n\t\tT_valD['T']= calc_T(sdata[1:]); # strip off the length var to get the data\r\n\t\tT_valD['T^2']= T_valD['T'] ** 2;\t# square T value\r\n\t\t\r\n\t\t# print the results from this\r\n\t\tprint '\\t', T_valD['L'], '|' , T_valD['T'], '|', T_valD['T^2'];\r\n\t\t\r\n\t\t# add dict to list of results\r\n\t\tT_vals.append(T_valD);\r\n\t\t\r\n\t# \tplot graph with gnuplot\r\n\twrite_gnuplot_datafile(\"T2_vs_L\", T_vals);\r\n\t\r\n\t#\twrite results for latex\r\n\tdef __doformat_L_3T (entry):\r\n\t\t# L | t1/2/3\r\n\t\tvals = [];\r\n\t\tvals.append(entry[0].toStr(latex=True));\r\n\t\tvals.append(entry[1].toStr(latex=True));\r\n\t\tvals.append(entry[2].toStr(latex=True));\r\n\t\tvals.append(entry[3].toStr(latex=True));\r\n\t\treturn vals;\r\n\twrite_latex_results(\"sphereResults\", \r\n\t\t[\"L (m)\", \"$t_1 (s)$\", \"$t_2 (s)$\", \"$t_3 (s)$\"],\r\n\t\t __doformat_L_3T, sphere_data,\r\n\t\t \"Raw data\");\r\n\t\r\n\tdef __doformat_T2_L (entry):\r\n\t\t# L | T^2\r\n\t\tvals = [];\r\n\t\tvals.append(entry['L'].toStr(latex=True));\r\n\t\tvals.append(entry['T^2'].toStr(latex=True));\r\n\t\treturn vals;\r\n\twrite_latex_results(\"sphereResultsA\", \r\n\t\t[\"L (m)\", \"$T^2 (s^2)$\"], \r\n\t\t__doformat_T2_L, T_vals,\r\n\t\t\"Processed data\");\r\n\t\r\n\t# \twait for user to input the necessary data (slope value)\r\n\tS = ui_GetSlope();\r\n\tprint \"\\tSlope of T^2 vs L is \", S\r\n\tn = calc_n(sphere_inertia, S, wire_average);\r\n\tprint \"\\tModulus of Rigidity of Wire, n, is \", n, \"\\n\"\r\n\t\r\n\t# inertia of heart disk\r\n\tprint \"Calculating Results for Heart-Shaped Disk\"\r\n\t#\tdata\r\n\thdisk_data= [[P(202, 1, mmDim),\tP(99.41, 0.05, sDim), P(99.24, 0.05, sDim), P(99.16, 0.05, sDim)]];\r\n\t\r\n\t# do calculations\r\n\t# \tlength first - convert 
units to SI-units\r\n\tT_valH = {'L':hdisk_data[0][0]};\r\n\tT_valH['L'].changeUnits(mDim);\r\n\t# \tcalculate T-Values, and store as separate \r\n\tT_valH['T']= calc_T(hdisk_data[0][1:]); # strip off the length var to get the data\r\n\tT_valH['T^2']= T_valH['T'] ** 2;\t# square T value\r\n\t#\tcalculate inertia of heart disk\r\n\thInertia= calc_HeartInertia(sphere_inertia, T_vals[-1]['T^2'], T_valH['T^2']);\r\n\t\r\n\t# print the results from this\r\n\tprint '\\t', \"Average T = \", T_valH['T'];\r\n\tprint '\\t', \"Average T^2 = \", T_valH['T^2'];\r\n\tprint '\\t', \"Inertia of Heart Disk = \", hInertia\t\t\r\n\t\r\n\t#\twrite results for latex\r\n\twrite_latex_results(\"heatResults\", \r\n\t\t[\"L (m)\", \"$t_1 (s)$\", \"$t_2 (s)$\", \"$t_3 (s)$\"],\r\n\t\t __doformat_L_3T, hdisk_data,\r\n\t\t \"Raw data for Heart-Shaped disk\");\r\n\t\r\n\t\r\n\t# done\r\n\tsys.stdin.readline();\r\n"
}
] | 4 |
bhavya1224/OpenCV_ | https://github.com/bhavya1224/OpenCV_ | 49fd7c1c65a8aa35cf0facad946d5e0af1653cbe | 3326534952aeee80c8ad88697e1257f75dcef370 | ff25cba8c43fa92ae6efc75aa5c7894557c9c7e2 | refs/heads/master | 2022-11-19T21:44:49.987325 | 2020-07-08T12:13:11 | 2020-07-08T12:13:11 | 278,079,088 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4943566620349884,
"alphanum_fraction": 0.544018030166626,
"avg_line_length": 28.964284896850586,
"blob_id": "3a953c074c4e7fd22e1b7e0620564a2f08fa6a5c",
"content_id": "18fc7aed71dada5903d27742f5f352d623db0bbe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 886,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 28,
"path": "/Open CV/mouse listner.py",
"repo_name": "bhavya1224/OpenCV_",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\nimport cv2\r\n#event = [i for i in(dir(cv2) if 'EVENT' in i]\r\n#print(events)\r\n\r\ndef click(event, x,y,flags,param):\r\n if event==cv2.EVENT_LBUTTONDOWN:\r\n text = str(x) + ', '+ str(y)\r\n font = cv2.FONT_HERSHEY_SIMPLEX\r\n cv2.putText(frame,text, (x,y),font,1,(255,255,0), 2,cv2.LINE_AA)\r\n cv2.imshow('image',frame)\r\n \r\n if event==cv2.EVENT_RBUTTONDOWN:\r\n blue = frame[y, x, 0]\r\n green = frame[y, x, 1]\r\n red = frame[y, x, 2]\r\n font = cv2.FONT_HERSHEY_SIMPLEX\r\n text = str(blue) + ', ' + str(green) + ', ' + str(red)\r\n cv2.putText(frame,text, (x,y),font,0.5,(0,255,0), 2)\r\n cv2.imshow('image',frame)\r\n \r\n\r\nframe = np.zeros((512, 512, 3), np.uint8)\r\ncv2.imshow('image', frame)\r\n\r\ncv2.setMouseCallback('image', click)\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()\r\n\r\n\r\n \r\n\r\n\r\n"
},
{
"alpha_fraction": 0.5439121723175049,
"alphanum_fraction": 0.6187624931335449,
"avg_line_length": 30.322580337524414,
"blob_id": "ede8de17758102086241def29313d13517d10cc4",
"content_id": "6357dddd7b16ccf8150d025297f436efcb69e30e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1002,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 31,
"path": "/Open CV/motion detection.py",
"repo_name": "bhavya1224/OpenCV_",
"src_encoding": "UTF-8",
"text": "import cv2\r\nimport numpy as np\r\n\r\ncap = cv2.VideoCapture('m_obj.avi')\r\n\r\nret, frame1 = cap.read()\r\nret, frame2 = cap.read()\r\n\r\nwhile cap.isOpened():\r\n diff = cv2.absdiff(frame1, frame2)\r\n gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)\r\n blur = cv2.GaussianBlur(gray,(5,5), 0)\r\n _, thresh = cv2.threshold(blur,20,255,cv2.THRESH_BINARY)\r\n dilated = cv2.dilate(thresh, None, iterations=3)\r\n contours,_ = cv2.findContours(dilated,cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\r\n for contour in contours:\r\n (x,y,w,h) = cv2.boundingRect(contour)\r\n if cv2.contourArea(contour) < 2000:\r\n continue\r\n cv2.rectangle(frame1,(x, y), (x+w, y+h),(0,255,0),2)\r\n cv2.putText(frame1, \"status: MOVING\", (10,20), cv2.FONT_HERSHEY_SIMPLEX,0.5,(0,0,255),2)\r\n\r\n #cv2.drawContours(frame1,contours,-1,(0,255,0),2)\r\n cv2.imshow('feed',frame1)\r\n frame1 = frame2\r\n ret, frame2 = cap.read()\r\n\r\n if cv2.waitKey(20) ==27:\r\n break;\r\n\r\ncv2.destroyAllWindows()\r\n"
},
{
"alpha_fraction": 0.5641183853149414,
"alphanum_fraction": 0.6029593348503113,
"avg_line_length": 30.440000534057617,
"blob_id": "5948463d997b59d40f68d7fa95a553978e033fa6",
"content_id": "edaf015d7051a614f4ca3754d06d941ecb37fa2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1622,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 50,
"path": "/Open CV/video object detection.py",
"repo_name": "bhavya1224/OpenCV_",
"src_encoding": "UTF-8",
"text": "#HSV -> Hue Saturation Value\r\n# hue corresponds to the color components(base pigmint),\r\n# hence just by selecting a range of Hue you can select any color.(0-360)\r\n\r\n# Saturantion is the amount of color\r\n\r\n#Value is basically the brightness of the color.\r\n\r\nimport cv2\r\nimport numpy as np\r\ncap = cv2.VideoCapture(0);\r\ndef nothing(x):\r\n pass\r\n\r\ncv2.namedWindow(\"tracking\")\r\ncv2.createTrackbar(\"lh\", \"tracking\", 0, 255, nothing)\r\ncv2.createTrackbar(\"ls\", \"tracking\", 0, 255, nothing)\r\ncv2.createTrackbar(\"lv\", \"tracking\", 0, 255, nothing)\r\ncv2.createTrackbar(\"uh\", \"tracking\", 255, 255, nothing)\r\ncv2.createTrackbar(\"us\", \"tracking\", 255, 255, nothing)\r\ncv2.createTrackbar(\"uv\", \"tracking\", 255, 255, nothing)\r\n\r\nwhile True:\r\n ret ,frame = cap.read()\r\n if ret== True:\r\n hsv = cv2.cvtColor(frame , cv2.COLOR_BGR2HSV)\r\n lh = cv2.getTrackbarPos(\"lh\", \"tracking\")\r\n ls = cv2.getTrackbarPos(\"ls\", \"tracking\")\r\n lv = cv2.getTrackbarPos(\"lv\", \"tracking\")\r\n uh = cv2.getTrackbarPos(\"uh\", \"tracking\")\r\n us = cv2.getTrackbarPos(\"us\", \"tracking\")\r\n uv = cv2.getTrackbarPos(\"uv\", \"tracking\")\r\n \r\n \r\n l_b = np.array([lh,ls,lv]) # lower color range\r\n u_b = np.array([uh,us,uv]) # upper color range\r\n mask = cv2.inRange(hsv, l_b, u_b)\r\n res = cv2.bitwise_and(frame,frame,mask=mask)\r\n \r\n \r\n cv2.imshow(\"frame\",frame)\r\n cv2.imshow(\"mask\",mask)\r\n cv2.imshow(\"result\",res)\r\n key = cv2.waitKey(1)\r\n if key == 27:\r\n break\r\n else:\r\n print(\" \")\r\ncap.release()\r\ncv2.destroyAllWindows()\r\n"
},
{
"alpha_fraction": 0.4878472089767456,
"alphanum_fraction": 0.5381944179534912,
"avg_line_length": 20.153846740722656,
"blob_id": "577b428634bfa50e45faa21f5f2aeead94ffbf0d",
"content_id": "3d8f139436d4b7f50f0c0e0977335f2b7dc54aad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 576,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 26,
"path": "/Open CV/color picker.py",
"repo_name": "bhavya1224/OpenCV_",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\nimport cv2\r\n\r\n\r\ndef click(event, x,y,flags,param):\r\n if event==cv2.EVENT_LBUTTONDOWN:\r\n blue = frame[x,y,0]\r\n green = frame[x,y,1]\r\n red = frame[x,y,2]\r\n cv2.circle(frame, (x,y), 3 , (0,0,255), -1)\r\n myimage = np.zeros((512,512,3), np.uint8)\r\n myimage[:] = [blue, green, red]\r\n \r\n cv2.imshow('image', myimage)\r\n \r\n \r\n \r\n\r\n\r\nframe = cv2.imread('image1.jpg')\r\ncv2.imshow('image', frame)\r\npoints = []\r\n\r\ncv2.setMouseCallback('image', click)\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()\r\n"
}
] | 4 |
fieldhawker/akagi | https://github.com/fieldhawker/akagi | f9b88fe53b3e2bf1eb50185f497af8c0a278e9b2 | 2890061ec3b5c8be5b9d17a7bcf70a939eccc41a | cd4d4e21d077857fd8d9c999feced51aac5f6b0d | refs/heads/master | 2022-12-21T22:20:15.857871 | 2019-09-09T13:32:12 | 2019-09-09T13:32:12 | 207,040,596 | 0 | 0 | null | 2019-09-08T00:17:40 | 2019-09-09T13:32:15 | 2022-12-08T06:08:34 | JavaScript | [
{
"alpha_fraction": 0.6175417900085449,
"alphanum_fraction": 0.6252983212471008,
"avg_line_length": 28.928571701049805,
"blob_id": "836539a723e7d673aeeb5d52cb06ebb4d162e009",
"content_id": "534f9888ae53a1e665610d03eb8a12f4c5831efa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1908,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 56,
"path": "/app/aimaker/views.py",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.http import HttpResponseRedirect\nfrom django.urls import reverse_lazy\nfrom django.utils import timezone\nfrom django_filters.views import FilterView\nfrom django.shortcuts import render\nfrom django.views.generic import TemplateView\nfrom app.aimaker.forms import AiMakerForm\nfrom app.aimaker.models import Photo\nfrom app.aimaker.utils import AiMakerRequest, AiMakerResponse\n\nimport logging\n\n\nclass AiMakerView(LoginRequiredMixin, TemplateView):\n \"\"\"\n AIメーカーへのリクエストを行う画面\n\n \"\"\"\n\n def __init__(self):\n self.params = {'state': \"\",\n 'label': \"\",\n 'score': \"\",\n 'base64': \"\",\n 'form': AiMakerForm()}\n\n # GETリクエスト(index.htmlを初期表示)\n def get(self, req):\n return render(req, 'aimaker/index.html', self.params)\n\n # POSTリクエスト(index.htmlに結果を表示)\n def post(self, req):\n # POSTされたフォームデータを取得\n form = AiMakerForm(req.POST, req.FILES)\n # フォームデータのエラーチェック\n if not form.is_valid():\n raise ValueError('invalid form')\n # フォームデータから画像ファイルを取得\n photo = Photo(image=form.cleaned_data['image'])\n\n # 画像ファイルをbase64で受け取る\n base64 = photo.image_src()\n\n # AIメーカー\n response = AiMakerRequest(base64)\n result = AiMakerResponse(response)\n\n # 結果を格納\n self.params['state'] = result['state']\n self.params['label'] = result['label']\n self.params['score'] = '{:.2%}'.format(result['score'])\n self.params['base64'] = base64\n\n # ページの描画指示\n return render(req, 'aimaker/result.html', self.params)\n"
},
{
"alpha_fraction": 0.6906740665435791,
"alphanum_fraction": 0.6934441328048706,
"avg_line_length": 31.81818199157715,
"blob_id": "3648af0bba1c7dad347cc22d87021ca611802159",
"content_id": "a31c81391d3bb2ad8f4b8ecd1c05afd13eb449b3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1131,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 33,
"path": "/app/urls.py",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "from django.urls import path\nfrom django.conf.urls import url, include\n\nfrom .models import Item\nfrom django.views.generic import TemplateView\n\nfrom .views import ItemFilterView, ItemDetailView, ItemCreateView, ItemUpdateView, ItemDeleteView, FaceView\nfrom .views_top import TopIndexView\nfrom .aimaker.views import AiMakerView\n\n# アプリケーションのルーティング設定\n\nurlpatterns = [\n path('', TopIndexView.as_view(), name='top'),\n\n # ml5.js\n path('ml5/styletransfervideo', TemplateView.as_view(\n template_name='app/ml5/style_transfer_video.html'), name='style_transfer_video'),\n\n # 顔判定\n path('face/', FaceView.as_view(), name='face'),\n\n # CRUD\n path('data/detail/<int:pk>/', ItemDetailView.as_view(), name='detail'),\n path('data/create/', ItemCreateView.as_view(), name='create'),\n path('data/update/<int:pk>/', ItemUpdateView.as_view(), name='update'),\n path('data/delete/<int:pk>/', ItemDeleteView.as_view(), name='delete'),\n path('data/', ItemFilterView.as_view(), name='index'),\n\n # AIメーカー\n path('aimaker/', AiMakerView.as_view(), name='aimaker'),\n\n]\n"
},
{
"alpha_fraction": 0.4941176474094391,
"alphanum_fraction": 0.6951871514320374,
"avg_line_length": 16.314815521240234,
"blob_id": "12e55221dbb41f848a0a20a151506607fa6acc5e",
"content_id": "ce525cf09286005f01204db2137e0e0e952ed06d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 935,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 54,
"path": "/requirements.txt",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "absl-py==0.8.0\nastor==0.8.0\nastroid==2.2.5\nbeautifulsoup4==4.8.0\ncertifi==2019.6.16\nchardet==3.0.4\ncycler==0.10.0\ndj-database-url==0.5.0\nDjango==2.1.2\ndjango-crispy-forms==1.7.2\ndjango-filter==2.0.0\ndjango-heroku==0.3.1\ngast==0.3.1\ngoogle-pasta==0.1.7\ngraphviz==0.12\ngrpcio==1.23.0\ngunicorn==19.9.0\nh5py==2.10.0\nidna==2.8\nisort==4.3.21\nKeras==2.2.5\nKeras-Applications==1.0.8\nKeras-Preprocessing==1.1.0\nkiwisolver==1.1.0\nlazy-object-proxy==1.4.2\nlxml==4.4.1\nMarkdown==3.1.1\nmatplotlib==3.1.1\nmccabe==0.6.1\nnumpy==1.17.2\nopencv-python==4.1.1.26\nPillow==6.1.0\nprotobuf==3.9.1\npsycopg2==2.8.3\npydotplus==2.0.2\npylint==2.3.1\npyparsing==2.4.2\npython-dateutil==2.8.0\npython-dotenv==0.10.3\npytz==2018.5\nPyYAML==5.1.2\nrequests==2.22.0\nscipy==1.3.1\nsix==1.12.0\nsoupsieve==1.9.3\ntensorboard==1.14.0\ntensorflow==1.14.0\ntensorflow-estimator==1.14.0\ntermcolor==1.1.0\ntyped-ast==1.4.0\nurllib3==1.25.3\nWerkzeug==0.15.6\nwhitenoise==4.1.3\nwrapt==1.11.2\n"
},
{
"alpha_fraction": 0.7903743386268616,
"alphanum_fraction": 0.792513370513916,
"avg_line_length": 32.35714340209961,
"blob_id": "2dfa6290804ce40aaf1778a13196a97e995fe0e0",
"content_id": "cac3b5db9736063b6ada78e6f41ffda94ee1ad2b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1137,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 28,
"path": "/app/views_top.py",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.mixins import LoginRequiredMixin\n# from django.http import HttpResponseRedirect\n# from django.urls import reverse_lazy\n# from django.utils import timezone\n# from django.views.generic import DetailView\n# from django.views.generic.edit import CreateView, UpdateView, DeleteView\n# from django_filters.views import FilterView\nfrom django.views.generic.base import TemplateView\n\n# from .filters import ItemFilterSet\n# from .forms import ItemForm\n# from .models import Item\n\n\n# 未ログインのユーザーにアクセスを許可する場合は、LoginRequiredMixinを継承から外してください。\n#\n# LoginRequiredMixin:未ログインのユーザーをログイン画面に誘導するMixin\n# 参考:https://docs.djangoproject.com/ja/2.1/topics/auth/default/#the-loginrequired-mixin\n\nclass TopIndexView(LoginRequiredMixin, TemplateView):\n \"\"\"\n ビュー:トップページ表示画面\n\n 以下のパッケージを使用\n ・django-filter 一覧画面(ListView)に検索機能を追加\n https://django-filter.readthedocs.io/en/master/\n \"\"\"\n template_name = 'app/top_index.html'\n\n"
},
{
"alpha_fraction": 0.4848144054412842,
"alphanum_fraction": 0.5359954833984375,
"avg_line_length": 19.917646408081055,
"blob_id": "762a14fa53dd4201899ebd9f2b29681e7b170038",
"content_id": "59d8474003a9266e347db33c2095e6705254e42e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1928,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 85,
"path": "/app/aimaker/utils.py",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "from logging import basicConfig, getLogger, DEBUG\nfrom django.http import HttpResponse\nimport requests\nimport json\nimport io\nimport base64\n\nbasicConfig(level=DEBUG)\nlogger = getLogger(__name__)\n\n\ndef AiMakerRequest(base64):\n\n logger.debug('AiMakerRequest')\n # AIメーカーのWebAPIから取得したデータを返却する\n\n API_Key = 'c28f3694803e7631c5feb0831f29be77670a5d6f77ede6be444afd0b6d86280d2b67b3cc50730753e7e49c2342fd5b18'\n id = 3673\n url = 'https://aimaker.io/image/classification/api'\n\n query = {\n 'id': id,\n 'apikey': API_Key,\n 'base64': base64\n }\n\n try:\n # APIリクエスト\n response = requests.post(url, query)\n response = response.json()\n # logger.debug(response)\n return response\n\n except:\n logger.debug('Except AiMakerRequest.')\n\n return []\n\n\ndef AiMakerResponse(json):\n logger.debug('AiMakerResponse')\n # AIメーカーから受け取ったJSONから必要な値を取得\n\n # 想定されるレスポンスは以下のようになっている\n # {\n # \"state\": 1,\n # \"url\": \"https://aimaker.io/sample.png\",\n # \"labels\": {\n # \"0\": {\n # \"score\": 0.997,\n # \"label\": \"ラベル0\"\n # },\n # \"1\": {\n # \"score\": 0.003,\n # \"label\": \"ラベル1\"\n # }\n # }\n # }\n\n result = {\n 'state': 'NG',\n 'label': \"ラベル0\",\n 'score': 0\n }\n\n max_score = 0\n max_label = ''\n\n if not json['state'] == 1:\n logger.debug(result)\n return result\n\n for label in json['labels']:\n if max_score < label[\"score\"]:\n max_score = label[\"score\"]\n max_label = label[\"label\"]\n\n result = {\n 'state': 'OK',\n 'label': max_label,\n 'score': max_score\n }\n\n # logger.debug(result)\n return result\n"
},
{
"alpha_fraction": 0.6092967391014099,
"alphanum_fraction": 0.6106992363929749,
"avg_line_length": 25.54787254333496,
"blob_id": "eded72249e8277bf5c5ea13bc5479579803e53ae",
"content_id": "06188029e5290473c0c5df45c1d0c980c7ae65de",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5981,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 188,
"path": "/app/views.py",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.http import HttpResponseRedirect\nfrom django.urls import reverse_lazy\nfrom django.utils import timezone\nfrom django.views.generic import DetailView\nfrom django.views.generic.edit import CreateView, UpdateView, DeleteView\nfrom django_filters.views import FilterView\n\nfrom .filters import ItemFilterSet\nfrom .forms import ItemForm\nfrom .models import Item\n\n# 顔判定\nfrom django.shortcuts import render\nfrom django.views.generic import TemplateView\nfrom .forms import ImageForm\nfrom .face import detect\n\n\nimport logging\n\n\n# 未ログインのユーザーにアクセスを許可する場合は、LoginRequiredMixinを継承から外してください。\n#\n# LoginRequiredMixin:未ログインのユーザーをログイン画面に誘導するMixin\n# 参考:https://docs.djangoproject.com/ja/2.1/topics/auth/default/#the-loginrequired-mixin\n\nclass ItemFilterView(LoginRequiredMixin, FilterView):\n \"\"\"\n ビュー:一覧表示画面\n\n 以下のパッケージを使用\n ・django-filter 一覧画面(ListView)に検索機能を追加\n https://django-filter.readthedocs.io/en/master/\n \"\"\"\n model = Item\n\n # django-filter 設定\n filterset_class = ItemFilterSet\n # django-filter ver2.0対応 クエリ未設定時に全件表示する設定\n strict = False\n\n # 1ページの表示\n paginate_by = 10\n\n def get(self, request, **kwargs):\n \"\"\"\n リクエスト受付\n セッション変数の管理:一覧画面と詳細画面間の移動時に検索条件が維持されるようにする。\n \"\"\"\n\n logger = logging.getLogger('django')\n logger.info('ItemFilterView.get request.')\n # 一覧画面内の遷移(GETクエリがある)ならクエリを保存する\n if request.GET:\n request.session['query'] = request.GET\n # 詳細画面・登録画面からの遷移(GETクエリはない)ならクエリを復元する\n else:\n request.GET = request.GET.copy()\n if 'query' in request.session.keys():\n for key in request.session['query'].keys():\n request.GET[key] = request.session['query'][key]\n\n return super().get(request, **kwargs)\n\n def get_queryset(self):\n \"\"\"\n ソート順・デフォルトの絞り込みを指定\n \"\"\"\n # デフォルトの並び順として、登録時間(降順)をセットする。\n return Item.objects.all().order_by('-created_at')\n\n def get_context_data(self, *, object_list=None, **kwargs):\n \"\"\"\n 表示データの設定\n \"\"\"\n # 
表示データを追加したい場合は、ここでキーを追加しテンプレート上で表示する\n # 例:kwargs['sample'] = 'sample'\n return super().get_context_data(object_list=object_list, **kwargs)\n\n\nclass ItemDetailView(LoginRequiredMixin, DetailView):\n \"\"\"\n ビュー:詳細画面\n \"\"\"\n model = Item\n\n def get_context_data(self, **kwargs):\n \"\"\"\n 表示データの設定\n \"\"\"\n # 表示データの追加はここで 例:\n # kwargs['sample'] = 'sample'\n return super().get_context_data(**kwargs)\n\n\nclass ItemCreateView(LoginRequiredMixin, CreateView):\n \"\"\"\n ビュー:登録画面\n \"\"\"\n model = Item\n form_class = ItemForm\n success_url = reverse_lazy('index')\n\n def form_valid(self, form):\n \"\"\"\n 登録処理\n \"\"\"\n item = form.save(commit=False)\n item.created_by = self.request.user\n item.created_at = timezone.now()\n item.updated_by = self.request.user\n item.updated_at = timezone.now()\n item.save()\n\n return HttpResponseRedirect(self.success_url)\n\n\nclass ItemUpdateView(LoginRequiredMixin, UpdateView):\n \"\"\"\n ビュー:更新画面\n \"\"\"\n model = Item\n form_class = ItemForm\n success_url = reverse_lazy('index')\n\n def form_valid(self, form):\n \"\"\"\n 更新処理\n \"\"\"\n item = form.save(commit=False)\n item.updated_by = self.request.user\n item.updated_at = timezone.now()\n item.save()\n\n return HttpResponseRedirect(self.success_url)\n\n\nclass ItemDeleteView(LoginRequiredMixin, DeleteView):\n \"\"\"\n ビュー:削除画面\n \"\"\"\n model = Item\n success_url = reverse_lazy('index')\n\n def delete(self, request, *args, **kwargs):\n \"\"\"\n 削除処理\n \"\"\"\n item = self.get_object()\n item.delete()\n\n return HttpResponseRedirect(self.success_url)\n\n\nclass FaceView(TemplateView):\n # コンストラクタ\n def __init__(self):\n self.params = {'result_list': [],\n 'result_name': \"\",\n 'result_img': \"\",\n 'form': ImageForm()}\n\n # GETリクエスト(index.htmlを初期表示)\n def get(self, req):\n try:\n logger = logging.getLogger('django')\n logger.info('FaceView.get request.')\n return render(req, 'face/index.html', self.params)\n except:\n import traceback\n traceback.print_exc()\n \n # 
POSTリクエスト(index.htmlに結果を表示)\n def post(self, req):\n # POSTされたフォームデータを取得\n form = ImageForm(req.POST, req.FILES)\n # フォームデータのエラーチェック\n if not form.is_valid():\n raise ValueError('invalid form')\n # フォームデータから画像ファイルを取得\n image = form.cleaned_data['image']\n # 画像ファイルを指定して顔分類\n result = detect(image)\n # 顔分類の結果を格納\n self.params['result_list'], self.params['result_name'], self.params['result_img'] = result\n # ページの描画指示\n return render(req, 'face/index.html', self.params)\n"
},
{
"alpha_fraction": 0.6095505356788635,
"alphanum_fraction": 0.6432584524154663,
"avg_line_length": 26.384614944458008,
"blob_id": "d2cda3ae7609c980e2095b5e945d8312d4f7e7b0",
"content_id": "751cda8c5fb17eed65721b0625d9562919a84dc8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 380,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 13,
"path": "/app/aimaker/models.py",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "from django.db import models\nimport io\nimport base64\n\nclass Photo(models.Model):\n # 保存先ディレクトリの指定\n image = models.ImageField(upload_to='images')\n\n def image_src(self):\n with self.image.open() as img:\n base64_img = base64.b64encode(img.read()).decode()\n\n return 'data:' + img.file.content_type + ';base64,' + base64_img\n"
},
{
"alpha_fraction": 0.4163346588611603,
"alphanum_fraction": 0.425298810005188,
"avg_line_length": 19.510204315185547,
"blob_id": "ba15f110ef9b8a3d70b12723cc34072a9b3e56ba",
"content_id": "66b08761459c08d9f870bafabe52b39894e571b8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 1042,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 49,
"path": "/app/templates/aimaker/index.html",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "{% extends 'layout/_base.html' %}\n{% block title %}ボトラーAI{% endblock %}\n\n{% block content %}\n\n{% load crispy_forms_tags %}\n\n\n<div class=\"container my-3\">\n <div class=\"row\">\n <div class=\"col-sm-2\">\n </div>\n <div class=\"col-sm-8\">\n <h1>AIメーカーへのリクエスト</h1>\n <p></p>\n <p id='status'></p>\n\n <p></p>\n\n </div>\n <div class=\"col-sm-2\">\n </div>\n </div>\n <div class=\"row\">\n <div class=\"col-sm-2\">\n </div>\n\n <form method=\"POST\" action=\"{% url 'aimaker' %}\" enctype=\"multipart/form-data\" class=\"form-horizontal\">\n {% csrf_token %}\n <div class='form-group'>\n <div class=\"col-sm-8\">\n {{form.as_p}}\n <button type=\"submit\" class=\"btn btn-primary\">分類する</button>\n <br />\n </div>\n </div>\n </form>\n\n <div class=\"col-sm-2\">\n </div>\n\n </div>\n</div>\n\n\n\n\n\n{% endblock %}"
},
{
"alpha_fraction": 0.5362517237663269,
"alphanum_fraction": 0.5389876961708069,
"avg_line_length": 26.11111068725586,
"blob_id": "9dc680a2fbc7c86e31a5b0c51fa52b51cdb9f941",
"content_id": "9c309d2ef3242da3d87f5656acce4e953cd9322a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 969,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 27,
"path": "/app/forms.py",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "from django import forms\n\nfrom .models import Item\n\n\nclass ItemForm(forms.ModelForm):\n \"\"\"\n モデルフォーム構成クラス\n ・公式 モデルからフォームを作成する\n https://docs.djangoproject.com/ja/2.1/topics/forms/modelforms/\n \"\"\"\n\n class Meta:\n model = Item\n fields = '__all__'\n\n # 以下のフィールド以外が入力フォームに表示される\n # AutoField\n # auto_now=True\n # auto_now_add=Ture\n # editable=False\n\nclass ImageForm(forms.Form):\n image = forms.ImageField(label=\"判定する画像を選択してください\",\n error_messages={'missing' : '画像ファイルが選択されていません。',\n 'invalid' : '分類する画像ファイルを選択してください。',\n 'invalid_image' : '画像ファイルではないようです。'})"
},
{
"alpha_fraction": 0.7053706645965576,
"alphanum_fraction": 0.7628592848777771,
"avg_line_length": 22.192981719970703,
"blob_id": "cc66bc563bf824ddcc5349d86902a69209b0f676",
"content_id": "ebe8c0dd3d8f4d4d7117ca05771449a28ba7dc39",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3198,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 114,
"path": "/README.md",
"repo_name": "fieldhawker/akagi",
"src_encoding": "UTF-8",
"text": "akagi\n====\n\nSEP勉強会用のDjangoアプリケーション\n\n参考:\n[Python] プログラム初心者のためのWebアプリ簡単作成法\nhttps://qiita.com/okoppe8/items/4cc0f87ea933749f5a49\n\nリポジトリ:\nhttps://github.com/fieldhawker/akagi\n\n[Qiita: [Python] テンプレートアプリを使った業務用Webアプリケーション高速開発法の紹介【チュートリアル形式】](https://qiita.com/okoppe8/items/4cc0f87ea933749f5a49)\n\n環境構築\npython3 -m venv env\nsource env/bin/activate\npip install -r requirements.txt\npython manage.py migrate\npython manage.py createsuperuser \n\n起動\npython manage.py runserver\n\n## Requirement\n\n```\nDjango==2.1.1\ndjango-crispy-forms==1.7.2\ndjango-filter==2.0.0\npytz==2018.5\n```\n\n## Usage\n\nSteps\n\n1. Git clone this project\n2. Edit modelfile `app/models.py`\n3. Run `makemigrations` and `migrate`\n4. Edit HTML files `templates/item_filter.html` and `item_detail_contents.html`\n\nIf you use it production environment, you must replace `settings.SECRET_KEY`.\n\n## Contribution\n\n\n\n## Licence\n\n\n\n## Author\n\n\n## Command\n\nimage-collector 画像収集\nhttps://github.com/skmatz/image-collector\n\npython image_collector.py -t 佐倉綾音 -n 10 -d sakura_images\npython image_collector.py -t Gackt -n 200 -d google_images\npython image_collector.py -t オードリー春日 -n 200 -d google_images\n\nimg_face_dt 画像から顔を切り出し\nhttps://qiita.com/kerobot/items/e3abe3f21808b4b584bd\n\npython img_face_dt.py\n 実行時は以下のパスを任意に書き換えて実行\n IMAGE_PATH_PATTERN = \"./sakura_images/佐倉綾音/*\"\n IMAGE_PATH_PATTERN = \"./google_images/Gackt/*\"\n IMAGE_PATH_PATTERN = \"./google_images/オードリー春日/*\"\n OUTPUT_IMAGE_DIR = \"./face_image\"\n OUTPUT_IMAGE_DIR = \"./face_image/gackt\"\n OUTPUT_IMAGE_DIR = \"./face_image/オードリー春日\"\n\nimg_data_gen 画像を水増し\nhttps://qiita.com/kerobot/items/54bc1224424280150d1c\n\npython img_data_gen.py\n 実行時は以下のパスを任意に書き換えて実行\n IMAGE_PATH_PATTERN = \"./face_image/*\"\n IMAGE_PATH_PATTERN = \"./face_image/gackt/*\"\n IMAGE_PATH_PATTERN = \"./face_image/オードリー春日/*\"\n OUTPUT_IMAGE_DIR = \"./face_scratch_image\"\n OUTPUT_IMAGE_DIR = \"./face_scratch_image/gackt\"\n 
OUTPUT_IMAGE_DIR = \"./face_scratch_image/オードリー春日\"\n\nimg_model_gen 顔分類モデルの生成\nhttps://qiita.com/kerobot/items/be4fd1a166073fbcff38\n\n実行にはgraphvizが必要なのでローカル環境にインストールすること\nhttps://analytics-note.xyz/mac/mac-graphviz-install/\n brew install graphviz\n\npython img_model_gen.py\n 実行時は以下のパスを任意に書き換えて実行\n TEST_IMAGE_DIR = \"./test_image\"\n TRAIN_IMAGE_DIR = \"./face_scratch_image\"\n OUTPUT_MODEL_DIR = \"./model\"\n\nimg_decide 顔判定モデルで画像判定処理\nhttps://qiita.com/kerobot/items/1ce75622d173d231d423\n\npython img_decide.py 853620.jpeg\npython img_decide.py tmp/000019_002.jpg\npython img_decide.py tmp/010008_002.jpg\n\n\nfrom .cv2 import *\nImportError: libSM.so.6: cannot open shared object file: No such file or directory\n\nherokuではlibSM.so.6がないようなので, 入れる\nhttps://qiita.com/haru1843/items/210cb08024195b9d1bc8\n"
}
] | 10 |
osom2187/Codecademy_challenge_project | https://github.com/osom2187/Codecademy_challenge_project | dd7742ecaebf432a05593fd02113d4ceae63eacc | 0d366bcb25816a1144abd9626558017104755099 | a40745a1e2c7def444c36247a6a0a1f396a62939 | refs/heads/main | 2023-02-12T09:02:45.517935 | 2021-01-12T20:05:09 | 2021-01-12T20:05:09 | 329,098,797 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.570871889591217,
"alphanum_fraction": 0.6268875002861023,
"avg_line_length": 26.54166603088379,
"blob_id": "c2ffeb7dd5f58fc87a5a704a4857109aff0774e1",
"content_id": "83c29d35bc35c682af7764718a2003606da9f2e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2053,
"license_type": "no_license",
"max_line_length": 172,
"num_lines": 72,
"path": "/codecademy_cleaning_data_challenge.py",
"repo_name": "osom2187/Codecademy_challenge_project",
"src_encoding": "UTF-8",
"text": "medical_data = \\\r\n\"\"\"Marina Allison ,27 , 31.1 , \r\n#7010.0 ;Markus Valdez , 30, \r\n22.4, #4050.0 ;Connie Ballard ,43 \r\n, 25.3 , #12060.0 ;Darnell Weber \r\n, 35 , 20.6 , #7500.0;\r\nSylvie Charles ,22, 22.1 \r\n,#3022.0 ; Vinay Padilla,24, \r\n26.9 ,#4620.0 ;Meredith Santiago, 51 , \r\n29.3 ,#16330.0; Andre Mccarty, \r\n19,22.7 , #2900.0 ; \r\nLorena Hodson ,65, 33.1 , #19370.0; \r\nIsaac Vu ,34, 24.8, #7045.0\"\"\"\r\n\r\n# Add your code here\r\n# print(medical_data)\r\nupdated_medical_data = medical_data.replace('#','$')\r\n# print(updated_medical_data)\r\nnum_records = 0\r\nfor data in updated_medical_data: \r\n if data == '$': \r\n num_records += 1\r\n# print('There are '+str(num_records)+ ' medical records in the data')\r\nmedical_data_split = updated_medical_data.split(';')\r\n# print(medical_data_split)\r\nmedical_records = []\r\nfor item in medical_data_split:\r\n medical_records.append(item.split(','))\r\n# print(medical_records)\r\n\r\nmedical_records_clean = []\r\nfor stuff in medical_records: \r\n record_clean = []\r\n for things in stuff: \r\n record_clean.append(things.strip())\r\n medical_records_clean.append(record_clean)\r\n\r\n# print(medical_records_clean)\r\n# for record in medical_records_clean:\r\n # print(record[0].upper())\r\n\r\nnames = []\r\nages = []\r\nbmis = []\r\ninsurance_costs = []\r\n\r\nfor record in medical_records_clean:\r\n names.append(record[0])\r\n ages.append(record[1])\r\n bmis.append(record[2])\r\n insurance_costs.append(record[3])\r\n\r\n# print(names, ages, bmis, insurance_costs)\r\n\r\ntotal_bmi = 0 \r\nfor item in bmis:\r\n total_bmi = total_bmi + float(item)\r\n\r\naverage_bmi = total_bmi / len(bmis)\r\n# print('Average BMI: ' + str(average_bmi))\r\n\r\ntotal_insurance_costs = 0\r\nfor costs in insurance_costs:\r\n total_insurance_costs = float(costs[1:]) + total_insurance_costs\r\n\r\naverage_insurance_costs = total_insurance_costs / len(insurance_costs)\r\n# 
print(average_insurance_costs)\r\n\r\ncounter = 0\r\nfor items in names: \r\n print(str(names[counter]) + ' is ' + str(ages[counter]) + ' years old with a BMI of ' + str(bmis[counter]) + ' and an insurance cost of ' + str(insurance_costs[counter]))\r\n counter += 1"
}
] | 1 |
itskathylam/phd | https://github.com/itskathylam/phd | 50c69db89adbf46b75816dfa2f85f0fd68744061 | a2f2a94f5bf8324d4a142040d66fd574c42b0211 | 484d9510c957ef5ee282a15b73500b04fed95b38 | refs/heads/master | 2021-01-10T10:23:24.319455 | 2016-03-15T22:42:11 | 2016-03-15T22:42:11 | 36,387,527 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6771891117095947,
"alphanum_fraction": 0.6876201033592224,
"avg_line_length": 36.6489372253418,
"blob_id": "b510e28c3471c21023c35956a70a7cc94d22dbd0",
"content_id": "f1cb18e65a17809c5ee5e3002941f33669b7eef4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3643,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 94,
"path": "/2014_lam_plos/parse_all-by-all_blast_73-clones_check_homology-similarity_v4.py",
"repo_name": "itskathylam/phd",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\r\n\r\nfrom Bio.Blast import NCBIXML\r\n'''\r\n#From command line, execute all-by-all blastn to generate results.xml:\r\n#blastn -query contigs-5.fa -subject contigs-5.fa -evalue .001 -out results.xml -outfmt 5\r\n'''\r\n\r\nfrom interval import Interval, IntervalSet\r\n'''\r\nAn Interval is composed of the lower bound, a closed lower bound ^M\r\n flag, an upper bound, and a closed upper bound flag. The attributes^M\r\n are called lower_bound, lower_closed, upper_bound, and upper_closed,^M\r\n respectively. For an infinite interval, the bound is set to inf or ^M\r\n -inf. IntervalSets are composed of zero to many Intervals.\r\n \r\n#become familiar with interval usage \r\nr1 = IntervalSet([Interval(1, 1000)])\r\nr2 = IntervalSet([Interval(30, 50)])\r\nr3 = IntervalSet([Interval(1200, 1300)])\r\nprint r1 - r2\r\nprint r1 + r2\r\nx = r1 + r3\r\nprint x\r\nfor interval in x:\r\n print interval.lower_bound\r\n print interval.upper_bound\r\n'''\r\n\r\nfile = open(\"results.xml\") \r\nblast_records = NCBIXML.parse(file)\r\n\r\n##accumulate distance between contig pairs in dictionary (where 1 = identical)\r\ndistance = {}\r\n\r\n##for each queried sequence\r\nfor blast_record in blast_records:\r\n #print \"\\n\" + blast_record.query\r\n #print str(blast_record.query_letters)\r\n \r\n ##for each subject sequence\r\n for alignment in blast_record.alignments:\r\n\t\r\n ##accumulate hsp intervals for each subject sequence, by iterating through each hsp\r\n hsp_interval_list = []\r\n for hsp in alignment.hsps:\r\n\r\n\t ##if alignment was on subject complement, subtract alignment length from start to get interval\r\n\t if hsp.frame == (1,-1):\r\n\t\thsp_interval = IntervalSet([Interval(hsp.sbjct_start, hsp.sbjct_start - hsp.align_length)])\r\n\t\thsp_interval_list.append(hsp_interval)\r\n\t \r\n\t ##otherwise, alignment was on subject given strand, add alignment length to start to get interval\r\n\t else:\r\n\t\thsp_interval = 
IntervalSet([Interval(hsp.sbjct_start, hsp.sbjct_start + hsp.align_length)])\r\n\t\thsp_interval_list.append(hsp_interval)\r\n\r\n\t\r\n\t##use interval addition to remove overlapping regions over hsps\r\n\tnew_intervalset = IntervalSet()\r\n\tfor interval in hsp_interval_list: \r\n\t\tnew_intervalset = new_intervalset + interval\r\n\r\n\t##calculate length of the subject sequence that was involved in the alignment = [aligned length]\r\n\trange_list =[]\r\n\tfor interval in new_intervalset:\r\n\t start = interval.lower_bound\r\n\t end = interval.upper_bound\r\n\t for i in range(start, end):\r\n\t\trange_list.append(i)\r\n\t \r\n\t##check which of query/subject is shorter; then divide the [aligned length] by length of the shorter one\r\n\t##note: blast_record.query_letters = query length; alignment.length = subject length\r\n\t##keep track of the fraction and query/subject names for putting in dict\r\n\tfraction = 0\r\n\tif blast_record.query_letters <= alignment.length:\r\n\t fraction = float(len(range_list))/blast_record.query_letters\r\n\telse:\r\n\t fraction = float(len(range_list))/alignment.length\r\n\t \r\n\t##save the fraction (distance), which represents the homology between the query and subject\r\n\t##put the names into a list to sort; this overwrites duplicate key-value pairs in the dictionary\r\n\tname_pair = [str(blast_record.query), str(alignment.hit_def)]\r\n\tname_pair = sorted(name_pair)\r\n\tnew_name_pair = \":\".join(name_pair)\r\n\tdistance[str(new_name_pair)] = fraction \r\n\t \r\n##write distances to file\r\nout = open(\"out.txt\", \"w\")\r\nfor item in distance:\r\n #print item + \"\\t\\t\\t\" + str(distance[item])\r\n names = item.split(\":\")\r\n row = names[0] + \",\" + names[1] + \",\" + str(distance[item]) + \"\\n\"\r\n out.write(row)\r\n\r\n \r\n\r\n"
},
{
"alpha_fraction": 0.5902354121208191,
"alphanum_fraction": 0.5995350480079651,
"avg_line_length": 30.158878326416016,
"blob_id": "e23eb636bd5d184b08a0a21d3426dd5b059d9bca",
"content_id": "d354108f84a00d623b79584e94a82e39c22a587e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3441,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 107,
"path": "/2015_lam_microbiome/filter_ec_vector_use_blat_batch_v2.py",
"repo_name": "itskathylam/phd",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\r\n\r\nfrom Bio import SeqIO\r\nimport sys\r\nimport os\r\nimport time\r\n\r\n\r\n\r\n#FUNCTIONS\r\n\r\n#run blat and parse results; return a set of unique read names that are hits to the subject\r\ndef run_blat(files_dir, reads_filename, subject_filename):\r\n \r\n #run blat in the shell\r\n results_filename = reads_filename + \"_BLAT_\" + subject_filename + \".psl\"\r\n os.system(\"blat \" + subject_filename + \" \" + files_dir + reads_filename + \" \" + files_dir + results_filename)\r\n\r\n #open results \r\n results_file = open(files_dir + results_filename)\r\n \r\n #clear the header lines\r\n for i in range(0,5):\r\n results_file.readline()\r\n\r\n #track the names of reads that are 100% identical to E. coli (90 base identity)\r\n match_names = set()\r\n for line in results_file:\r\n \r\n #parse the line\r\n line = line.split('\\t')\r\n match = line[0]\r\n mismatch = line[1]\r\n gaps = line[6]\r\n query_name = line[9]\r\n \r\n #if the match was 100% identical (90 bases), accumulate the name\r\n if match == '90' and mismatch == '0' and gaps == '0':\r\n match_names.add(query_name)\r\n \r\n #delete psl files\r\n os.system(\"rm \" + files_dir + \"*.psl\")\r\n \r\n return match_names\r\n\r\n\r\n\r\n#INPUT FILES\r\n\r\nfilenames_dir = sys.argv[1]\r\nvector_filename = sys.argv[2]\r\nec_filename = sys.argv[3]\r\n\r\n#get list of filenames into array to process\r\nfilenames = os.listdir(filenames_dir)\r\nfilenames.sort()\r\n\r\n\r\n\r\n#RUN BLAT AND PARSE RESULTS FOR EACH FILE\r\n\r\n#write summary file of results\r\nsummary_file = open(filenames_dir + \"summary.txt\", \"w\")\r\nsummary_file.write(\"filename \\ttotal reads \\ttotal dirty \\tec \\tvector \\n\")\r\n\r\n#process files\r\nfor filename in filenames:\r\n \r\n #get sets of read names that are hits\r\n ec_hits = run_blat(filenames_dir, filename, ec_filename)\r\n vector_hits = run_blat(filenames_dir, filename, vector_filename) \r\n \r\n #track for summary file\r\n total_count = 0\r\n 
total_dirty_count = 0\r\n vector_count = 0\r\n ec_count = 0\r\n \r\n #write clean and dirty reads to new files; also summary file\r\n clean_file = open(filenames_dir + filename + \"_clean_chked.fa\", \"w\")\r\n dirty_file = open(filenames_dir + filename + \"_dirty_chked.fa\", \"w\")\r\n \r\n #open the reads file; for each FASTA sequence read\r\n for seq_record in SeqIO.parse(filenames_dir + filename, \"fasta\"):\r\n total_count = total_count + 1\r\n \r\n if (seq_record.id in ec_hits):\r\n SeqIO.write(seq_record, dirty_file, \"fasta\")\r\n ec_hits.remove(seq_record.id) #remove id from set to make following searches faster \r\n ec_count = ec_count + 1\r\n total_dirty_count = total_dirty_count + 1\r\n \r\n elif (seq_record.id in vector_hits):\r\n SeqIO.write(seq_record, dirty_file, \"fasta\")\r\n vector_hits.remove(seq_record.id) #remove id from set to make following searches faster \r\n vector_count = vector_count + 1\r\n total_dirty_count = total_dirty_count + 1\r\n \r\n #if not in list of read names, it's a clean read\r\n else:\r\n \r\n #write to clean file\r\n SeqIO.write(seq_record, clean_file, \"fasta\")\r\n \r\n #write to summary\r\n output = filename + \"\\t\" + str(total_count) + \"\\t\" + str(total_dirty_count) + \"\\t\" + str(ec_count) + \"\\t\" + str(vector_count) + \"\\n\"\r\n summary_file.write(output)\r\n"
},
{
"alpha_fraction": 0.6160377264022827,
"alphanum_fraction": 0.6240565776824951,
"avg_line_length": 31.650793075561523,
"blob_id": "4010d5c4ac12fc61636b7a043f329271fe200280",
"content_id": "e56ebf49eb81b400d819d9d4ff03e57e2fb59164",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2120,
"license_type": "no_license",
"max_line_length": 154,
"num_lines": 63,
"path": "/2015_lam_microbiome/check_filtering_ec_or_vector_batch_v1.py",
"repo_name": "itskathylam/phd",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\r\n\r\nfrom Bio import SeqIO\r\nimport sys\r\nimport os\r\nimport time\r\n\r\n\r\n\r\n#input: directory of files to process; fasta Ec file; fasta vector file\r\nfilenames_dir = sys.argv[1]\r\nvector_filename = sys.argv[2]\r\nec_filename = sys.argv[3]\r\n\r\n#get list of filenames into array to process\r\nfilenames = os.listdir(filenames_dir)\r\nfilenames.sort()\r\n \r\n#for ec, vector: get the sequence, rev comp of the sequence, in preparation for checking\r\nec = SeqIO.read(ec_filename, \"fasta\")\r\nec_rc = ec.reverse_complement()\r\nvector = SeqIO.read(vector_filename, \"fasta\")\r\nvector_rc = vector.reverse_complement()\r\n\r\n\r\n#prep output file\r\noutfile = open(filenames_dir + \"results_Ec_or_pJC8.txt\", \"w\")\r\noutfile.write(\"filename \\ttotal \\tboth \\tEc \\tvector \\tunaccounted \\n\")\r\n\r\n\r\n#process each file \r\nfor filename in filenames:\r\n \r\n #check whether each read in the file is from pJC8 or Ec or both; should not be any unaccounted, but track in case\r\n both_count = 0\r\n ec_count = 0\r\n vector_count = 0\r\n unaccounted = 0\r\n total = 0\r\n unaccounted_file = open(filenames_dir + filename + \"_unaccounted_reads\", \"w\")\r\n \r\n for seq_record in SeqIO.parse(filenames_dir + filename, \"fasta\"):\r\n total = total + 1\r\n \r\n #if seq in both\r\n if (seq_record.seq in ec.seq or seq_record.seq in ec_rc.seq):\r\n ec_count = ec_count + 1\r\n if (seq_record.seq in vector.seq or seq_record.seq in vector_rc.seq):\r\n vector_count = vector_count + 1\r\n both_count = both_count + 1\r\n \r\n elif (seq_record.seq in vector.seq or seq_record.seq in vector_rc.seq):\r\n vector_count = vector_count + 1\r\n \r\n #this shouldn't happen\r\n else:\r\n unaccounted = unaccounted + 1\r\n SeqIO.write(seq_record, unaccounted_file, \"fasta\")\r\n\r\n\r\n #write to output file: filename, total num reads, num Ec reads, num pjc8 reads\r\n output_line = filename + \"\\t\" + str(total) + \"\\t\" + str(both_count) + \"\\t\" + 
str(ec_count) + \"\\t\" + str(vector_count) + \"\\t\" + str(unaccounted) + \"\\n\"\r\n outfile.write(output_line)\r\n"
},
{
"alpha_fraction": 0.5561056137084961,
"alphanum_fraction": 0.566006600856781,
"avg_line_length": 25.933332443237305,
"blob_id": "f76566baf43c4a46e228f5e71f4fc247379269c0",
"content_id": "71648b44d04993e62236914eb9a492721de421c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1212,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 45,
"path": "/2015_lam_microbiome/calculate_percent_gc_batch_v1.py",
"repo_name": "itskathylam/phd",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom Bio import SeqIO\nimport sys\nimport os\n\n\n\n#function to calc percent gc from all seqs in a fasta file \ndef get_gc(files_dir, filename):\n \n #track number of each base\n bases = {'A':0, 'C':0, 'G':0, 'T':0}\n \n #open the reads file; for each FASTA sequence, track bases in seq\n for seq_record in SeqIO.parse(files_dir + filename, \"fasta\"):\n for base in seq_record.seq:\n if base == 'A':\n bases['A'] = bases['A'] + 1\n elif base == 'C':\n bases['C'] = bases['C'] + 1\n elif base == 'G':\n bases['G'] = bases['G'] + 1\n else:\n bases['T'] = bases['T'] + 1\n \n #do the stats\n total_bases = float(sum(bases.values()))\n gc = (bases['G'] + bases['C']) / total_bases * 100\n return gc\n\n\n#input file in fasta\nfilenames_dir = sys.argv[1]\nfilenames = os.listdir(filenames_dir)\nfilenames.sort()\n\n#summary file\nresults_file = open(filenames_dir + \"summary.txt\", \"w\")\nresults_file.write(\"filename \\t%GC \\n\")\n\n#process each file\nfor filename in filenames:\n gc = get_gc(filenames_dir, filename)\n output = filename + \"\\t\" + str(gc) + \"\\n\"\n results_file.write(output)\n"
},
{
"alpha_fraction": 0.6540632247924805,
"alphanum_fraction": 0.6613995432853699,
"avg_line_length": 25.606060028076172,
"blob_id": "f8bd6758dbb49aec622adc3e0436cdc7ad6dc262",
"content_id": "9a67ba8881c902524c951127f9c7372cde922e47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1772,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 66,
"path": "/2015_lam_frontiers/get_phyla_count_from_otu_table_v1.py",
"repo_name": "itskathylam/phd",
"src_encoding": "UTF-8",
"text": "import sys\nimport os\n\n\n#get to working dir and set file name\n#os.chdir(\"/home/kathy/Dropbox/PhD/Data_Analysis/2015_Frontiers/8_otu_bias_analysis/count_phlya_abundance\")\notu_filename = sys.argv[1]\n\n#prep outfile\nphyla_filename = os.path.splitext(otu_filename)[0] + \"_phyla_percent.txt\"\nphyla_file = open(phyla_filename, \"w\")\n\n#get otu table\notu_file = open(otu_filename, \"r\")\n\n#discard first header line\notu_file.readline()\n\n#start dict to keep phyla counts\ncosmid = {}\nbulk = {}\n\n#process each line, adding to both dicts\nfor line in otu_file:\n line = line.split(\",\")\n bulk_count = int(line[1])\n cosmid_count = int(line[2])\n phylum = line[4]\n \n #check if phylum in either dict and add accordingly\n if phylum in cosmid:\n cosmid[phylum] = cosmid[phylum] + cosmid_count\n bulk[phylum] = bulk[phylum] + bulk_count\n else:\n cosmid[phylum] = cosmid_count\n bulk[phylum] = bulk_count\n\n#given a dictionary of phyla counts, return dict of phyla fractions \ndef get_phyla_fractions(phyla_dict):\n \n #get total member count \n total = 0\n for phylum in phyla_dict:\n total = total + phyla_dict[phylum]\n total = float(total)\n \n #make new dict of fractions\n new_dict = {}\n for phylum in phyla_dict:\n new_dict[phylum] = phyla_dict[phylum]/total\n \n return new_dict\n \ncosmid_fraction = get_phyla_fractions(cosmid)\nbulk_fraction = get_phyla_fractions(bulk)\n\n#write phyla fractions to new file\nfor item in cosmid_fraction:\n phyla_file.write(item)\n phyla_file.write(\"\\t\")\n phyla_file.write(str(format(cosmid_fraction[item], '.9f')))\n phyla_file.write(\"\\t\")\n phyla_file.write(str(format(bulk_fraction[item], '.9f')))\n phyla_file.write(\"\\n\")\n\nphyla_file.close()\n\n \n \n \n"
},
{
"alpha_fraction": 0.6373748779296875,
"alphanum_fraction": 0.6707452535629272,
"avg_line_length": 33.46154022216797,
"blob_id": "29c0b612e6d650ea683dad040499de30f65342bc",
"content_id": "e4e78479ff6aa8cccd3a0bf4eb73b9df26ceacf4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 899,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 26,
"path": "/2014_lam_plos/make_heatmap_homology-similarity_73-clones_v1.R",
"repo_name": "itskathylam/phd",
"src_encoding": "UTF-8",
"text": "#install.packages(\"gplots\")\nlibrary(gplots)\n\nsetwd(\"D:\\\\Dropbox\\\\PhD\\\\Manuscripts\\\\Pooled Sequencing\\\\Resubmission\\\\Homology analysis\")\nx <- read.csv(\"out.csv\", header=FALSE, sep=\",\", as.is=TRUE)\nx\n\n#get names\nx.names <- sort(unique(x$V1))\nx.names\n\n# create a matrix of the right size and put names on it\nx.sim <- matrix(0, length(x.names), length(x.names))\ndimnames(x.sim) <- list(x.names, x.names)\n\n# create indices by converting names to numbers and create the normal and reversed to fill in all the matrix\nx.ind <- rbind(cbind(match(x[[1]], x.names), match(x[[2]], x.names)), cbind(match(x[[2]], x.names), match(x[[1]], x.names)))\nx.sim[x.ind] <- rep(x[[3]], 2)\nx.sim\n\n#heatmap\npar(mar=c(5,5))\nx.sim.matrix = as.matrix(x.sim)\nlwid=c(1.5,4.5)\t\nlhei = c(0.75,4.5)\nheatmap.2(x.sim.matrix, trace=\"none\", lwid=lwid, lhei=lhei, density.info=c(\"none\"), cexRow=0.75, cexCol=0.75, margins=c(6.5,6.5))\n\n\n\n"
},
{
"alpha_fraction": 0.611190140247345,
"alphanum_fraction": 0.626363217830658,
"avg_line_length": 25.038461685180664,
"blob_id": "eb19e6b975786a1862fdda5461e5d6f1e58ec302",
"content_id": "71f87fa828ba4f4837b89710163168cee14107e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2109,
"license_type": "no_license",
"max_line_length": 159,
"num_lines": 78,
"path": "/2015_lam_microbiome/find_all_consensus_promoters_batch_v1.py",
"repo_name": "itskathylam/phd",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\r\n\r\nfrom Bio import SeqIO\r\nimport sys\r\nimport os\r\nimport re\r\n\r\n\r\n\r\n#FUNCTIONS\r\n\r\n#look for consensus sequences 1 promoter; return count\r\ndef find_one_consensus(sequence, filename):\r\n \r\n #compile regex\r\n p = re.compile(sequence)\r\n count = 0\r\n \r\n #iterate through each fasta sequence\r\n for seq_record in SeqIO.parse(filename, \"fasta\"):\r\n \r\n #check the sequence\r\n for match in p.finditer(str(seq_record.seq)):\r\n count = count + 1\r\n \r\n #check the reverse complement\r\n for match in p.finditer(str(seq_record.reverse_complement().seq)):\r\n count = count + 1\r\n \r\n return count\r\n\r\n#look for consensus sequences for 5 promoters; return a string to be printed to file\r\ndef find_all_consensus(files_dir, reads_filename):\r\n \r\n #file location\r\n location = files_dir + reads_filename\r\n \r\n #rpoD sigma 70\r\n rpod_count = find_one_consensus(\"TTGACA.{15,19}TATAAT\", location)\r\n \r\n #rpoE sigma 24\r\n rpoe_count = find_one_consensus(\"GGAACTT.{15,19}TCAAA\", location) \r\n \r\n #rpoH sigma 32\r\n rpoh_count = find_one_consensus(\"TTG[AT][AT][AT].{13,14}CCCCAT[AT]T\", location) \r\n \r\n #rpoN sigma 54\r\n rpon_count = find_one_consensus(\"TGGCA.{7}TGC\", location) \r\n \r\n #Bacteroides sigma AB\r\n bacteroides_count = find_one_consensus(\"TTTG.{19,21}TA.{2}TTTG\", location) \r\n \r\n output = filename + \"\\t\" + str(rpod_count) + \"\\t\" + str(rpoe_count) + \"\\t\" + str(rpoh_count) + \"\\t\" + str(rpon_count)+ \"\\t\" + str(bacteroides_count) + \"\\n\"\r\n return output\r\n\r\n\r\n\r\n#INPUT FILES\r\n\r\nfilenames_dir = sys.argv[1]\r\nfilenames = os.listdir(filenames_dir)\r\nfilenames.sort()\r\n\r\n\r\n#PROCESS ALL FILES\r\n\r\n#write summary file of results\r\nsummary_file = open(filenames_dir + \"summary.txt\", \"w\")\r\nsummary_file.write(\"filename \\trpoD reads \\trpoE \\trpoH \\trpoN \\tBacteroides \\n\")\r\n\r\n#process files\r\nfor filename in filenames:\r\n \r\n #get sets of 
read names that are hits\r\n output = find_all_consensus(filenames_dir, filename)\r\n \r\n #write to summary\r\n summary_file.write(output)\r\n"
}
] | 7 |
aousssbai/roboticscomp312p | https://github.com/aousssbai/roboticscomp312p | 4970bf25da05987070b660252dc871a20c690026 | 11812f049fd915da7d9378e5a816a7109114f58b | c13a1c10794971734a371db8d470bd49f7456ceb | refs/heads/master | 2021-09-05T12:53:55.291712 | 2018-01-27T19:16:54 | 2018-01-27T19:16:54 | 118,805,089 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4109768867492676,
"alphanum_fraction": 0.4734768867492676,
"avg_line_length": 18.937171936035156,
"blob_id": "6575d45ad845a18b4f8ae6bb9136ddf75767901e",
"content_id": "d7267d072dcf5531eac4d6a10e0d924fa09ac551",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3808,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 191,
"path": "/lab2.py",
"repo_name": "aousssbai/roboticscomp312p",
"src_encoding": "UTF-8",
"text": "import sys\nimport linecache\nx = []\ny = []\nangle = []\n\n\n\n#we get the coordinates from a given waypoint position\n\ndef cutCoord(initialPos): #===============================================================================\n\n prevCount = 0\n angle=[]\n x=[]\n y=[]\n for i in range(0, len(initialPos)):\n\n if (initialPos[i] == \" \"):\n prevCount = i + 1\n break\n\n else :\n x.append(initialPos[i])\n\n\n for i in range(prevCount, len(initialPos)):\n\n if (initialPos[i] == \" \"):\n prevCount = i+1\n break\n\n else :\n y.append(initialPos[i])\n\n\n\n for i in range(prevCount, len(initialPos)):\n\n if (initialPos[i] == \" \"):\n break\n\n else :\n angle.append(initialPos[i])\n\n if (angle[len(angle)-1] == '\\n'):\n del angle[len(angle)-1]\n\n\n\n\n return x, y, angle\n\n\n\n\ndef convertCoord(list): #======================================================================\n\n x1=list[0]\n y1=list[1]\n angle1=list[2]\n\n\n if (len(x1) == 1):\n xVal = int(x1[0])\n\n elif (len(x1) == 2):\n if (x1[0] == '-'):\n xVal = int(x1[1])*(-1)\n\n else:\n xVal = int(x1[0])*10 + int(x1[1])\n\n\n\n elif (len(x1) == 3):\n if (x1[0]=='-'):\n xVal = (int(x1[1])*10 + int(x1[2]))*(-1)\n\n else:\n xVal = int(x1[0])*100 + int(x1[1])*10 + int(x1[2])\n\n elif (len(x1) == 4):\n if (x1[0]=='-'):\n xVal = (int(y1[1])*100 + int(x1[2])*10 + int(x1[3]))*(-1)\n\n else:\n xVal = int(x1[0])*1000 + int(x1[1])*100 + int(x1[2])*10 + int(x1[3])\n\n\n if (len(y1) == 1):\n yVal = int(y1[0])\n\n\n elif (len(y1) == 2):\n if (y1[0] == '-'):\n yVal = int(y1[1])*(-1)\n\n else:\n yVal = int(y1[0])*10 + int(y1[1])\n\n\n elif (len(y1) == 3):\n if (y1[0]=='-'):\n yVal = (int(y1[1])*10 + int(y1[2]))*(-1)\n\n else:\n yVal = int(y1[0])*100 + int(y1[1]) *10 + int(y1[2])\n\n elif (len(y1) == 4):\n if (y1[0]=='-'):\n yVal = (int(y1[1])*100 + int(y1[2])*10 + int(y1[3]))*(-1)\n\n else:\n yVal = int(y1[0])*1000 + int(y1[1])*100 + int(y1[2])*10 + int(y1[3])\n\n\n\n\n\n if (len(angle1) == 1):\n angleVal = 
int(angle1[0])\n\n\n elif (len(angle1) == 2):\n if (angle1[0] == '-'):\n angleVal = int(angle1[1])*(-1)\n\n else:\n angleVal = int(angle1[0])*10 + int(angle1[1])\n\n\n elif (len(angle1) == 3):\n if (angle1[0]=='-'):\n angleVal = (int(angle1[1])*10 + int(angle1[2]))*(-1)\n\n else:\n angleVal = int(angle1[0])*100 + int(angle1[1])*10 + int(angle1[2])\n\n\n elif (len(angle1) == 4):\n if (angle1[0]=='-'):\n angleVal = (int(angle1[1])*100 + int(angle1[2])*10 + int(angle1[3]))*(-1)\n\n else:\n angleVal = int(angle1[0])*1000 + int(angle1[1])*100 + int(angle1[2])*10 + int(angle1[3])\n\n\n return xVal, yVal, angleVal\n\n\n\n#get the initial position of the robot by reading the first waypoint of the input file\n\nargNum = len(sys.argv)\n\nif (argNum == 2):\n\n print(\"STARTED\")\n\nelse:\n\n print (\"no file specified\")\n exit(1)\n\n\ninitialPosition = open(sys.argv[1], 'r').readline().rstrip()\nsecondPos = linecache.getline(sys.argv[1], 2)\n\n\ncurrentCoord = convertCoord(list(cutCoord(initialPosition)))\nnextCoord = convertCoord(list(cutCoord(secondPos)))\n\nprint(currentCoord)\nprint(nextCoord)\n\n\n\n\nnum_lines = sum(1 for line in open(sys.argv[1], 'r'))\n\nwaypointsList = []\n\nfor i in range(1,num_lines+1):\n pos = linecache.getline(sys.argv[1],i)\n i= convertCoord(list(cutCoord(pos)))\n waypointsList.append(i)\n\n\n\n# maintenant je dois resoudre l'equation pour savoir quels sont les params a rentrer pour aller au second waypoint\n"
}
] | 1 |
MorbidMiyako/Hash-tables-I | https://github.com/MorbidMiyako/Hash-tables-I | e88f80dcd7b37ce9a946c5f78e22255977e5fb95 | 7909cd13cd6a0ca792e3dfbd77365d78a5586f85 | dc1f10859412b7c3c2795e51c52c2e63c4187a83 | refs/heads/master | 2022-11-25T17:17:36.286212 | 2020-08-03T23:37:32 | 2020-08-03T23:37:32 | 282,240,612 | 0 | 0 | null | 2020-07-24T14:28:26 | 2020-07-24T14:29:23 | 2020-08-03T23:37:33 | Python | [
{
"alpha_fraction": 0.5114753842353821,
"alphanum_fraction": 0.5173770785331726,
"avg_line_length": 28.326923370361328,
"blob_id": "bdf1689eefd95956a6b752421ec1911c08bcb6bf",
"content_id": "b5d7c7f997a0ffe0bfbfbc8dc08f3a40e687c490",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1525,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 52,
"path": "/applications/word_count/word_count.py",
"repo_name": "MorbidMiyako/Hash-tables-I",
"src_encoding": "UTF-8",
"text": "def word_count(s):\n # Your code here\n ignoredLetters = [\"\\\"\", \":\", \";\", \",\", \".\", \"-\", \"+\", \"=\",\n \"/\", \"\\\\\", \"|\", \"[\", \"]\", \"{\", \"}\", \"(\", \")\", \"*\", \"^\", \"&\", \"?\", \"!\"]\n\n replaceLetters = [\"\\n\", \"\\r\", \"\\t\"]\n\n longest_word = 0\n\n lowercase_and_cleaned = \"\"\n\n for letter in s:\n if letter not in ignoredLetters:\n if letter not in replaceLetters:\n lowercase_and_cleaned += letter.lower()\n else:\n lowercase_and_cleaned += \" \"\n\n wordsArray = lowercase_and_cleaned.split(\" \")\n wordsDict = {}\n\n print(wordsArray)\n\n for words in wordsArray:\n words = str(words)\n if words in wordsDict:\n wordsDict[words] += 1\n else:\n if len(words) > longest_word:\n longest_word = len(words)\n wordsDict[words] = 1\n\n del wordsDict[\"\"]\n print(wordsDict)\n return wordsDict\n\n # wordsList = list(wordsDict.items())\n # # print(wordsList.sort(key=lambda e: e[0]))\n # wordsList.sort()\n # wordsList.sort(key=lambda e: e[1], reverse=True)\n\n # for wordTupels in wordsList:\n # spaces = (longest_word+2-len(wordTupels[0]))*\" \"\n # print(f\"{wordTupels[0]}{spaces}{wordTupels[1]}\")\n\n\nif __name__ == \"__main__\":\n print(word_count(\"\"))\n print(word_count(\"Hello\"))\n print(word_count('Hello, my cat. And my cat doesn\\'t say \"hello\" back.'))\n print(word_count(\n 'This is a test of the emergency broadcast network. This is only a test.'))\n"
},
{
"alpha_fraction": 0.37613919377326965,
"alphanum_fraction": 0.39188069105148315,
"avg_line_length": 20.945453643798828,
"blob_id": "fc7b55ffa887c4a16464f7c77fbae8f4387f84a4",
"content_id": "770a7ed586d6f5acf7070e41e3073e5ffc6c2927",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1207,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 55,
"path": "/applications/sumdiff/sumdiff.py",
"repo_name": "MorbidMiyako/Hash-tables-I",
"src_encoding": "UTF-8",
"text": "\"\"\"\nfind all a, b, c, d in q such that\nf(a) + f(b) = f(c) - f(d)\n\"\"\"\n\n# q = set(range(1, 10))\nq = set(range(1, 100))\n# q = (1, 3, 4, 7, 12)\n\n\ndef function(x):\n return x * 4 + 6\n\n\n# Your code here\ncalculated = {}\nsolutionStringsArray = []\n\n\nfor a in q:\n e = 0\n if a in calculated:\n e = calculated[a]\n else:\n e = function(a)\n calculated[a] = e\n\n for b in q:\n f = 0\n if b in calculated:\n f = calculated[b]\n else:\n f = function(b)\n calculated[b] = f\n\n for c in q:\n g = 0\n if c in calculated:\n g = calculated[c]\n else:\n g = function(c)\n calculated[c] = g\n\n for d in q:\n h = 0\n if d in calculated:\n h = calculated[d]\n else:\n h = function(d)\n calculated[d] = h\n if e + f == g - h:\n solutionStringsArray.append([a, b, c, d])\n # solutionStringsArray.append(\n # f\"f({a}) + f({b}) = f({c}) - f({d}) {e} + {f} = {g} - {h}\")\nprint(solutionStringsArray)\n"
},
{
"alpha_fraction": 0.5470527410507202,
"alphanum_fraction": 0.5542916059494019,
"avg_line_length": 23.794872283935547,
"blob_id": "6a34f89c94e7e2689eaf15fe56c168e1b2b6ebdc",
"content_id": "dea13efed1a42c92d069015ae378513f5b07bd97",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 967,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 39,
"path": "/applications/histo/histo.py",
"repo_name": "MorbidMiyako/Hash-tables-I",
"src_encoding": "UTF-8",
"text": "# Your code here\n\n\nf = open('robin.txt', 'r')\n\ncompleteCipher = f.read()\n\n\nignoredLetters = [\"\\\"\", \":\", \";\", \",\", \".\", \"-\", \"+\", \"=\",\n \"/\", \"\\\\\", \"|\", \"[\", \"]\", \"{\", \"}\", \"(\", \")\", \"*\", \"^\", \"&\", \"\\n\", \"\\r\", \"?\", \"!\"]\n\nlongest_word = 0\n\nlowercase_and_cleaned = \"\"\n\nfor letter in completeCipher:\n if letter not in ignoredLetters:\n lowercase_and_cleaned += letter.lower()\n\nwordsArray = lowercase_and_cleaned.split(\" \")\nwordsDict = {}\n\nfor words in wordsArray:\n words = str(words)\n if words in wordsDict:\n wordsDict[words] += \"#\"\n else:\n if len(words) > longest_word:\n longest_word = len(words)\n wordsDict[words] = \"#\"\n\nwordsList = list(wordsDict.items())\n# print(wordsList.sort(key=lambda e: e[0]))\nwordsList.sort()\nwordsList.sort(key=lambda e: e[1], reverse=True)\n\nfor wordTupels in wordsList:\n spaces = (longest_word+2-len(wordTupels[0]))*\" \"\n print(f\"{wordTupels[0]}{spaces}{wordTupels[1]}\")\n"
},
{
"alpha_fraction": 0.38789546489715576,
"alphanum_fraction": 0.4126547574996948,
"avg_line_length": 21.369230270385742,
"blob_id": "6ea32910f2fc02e06294508b218973cf9ae20987",
"content_id": "06542e40e2e95171a9765ce6af1f847da57e2b5d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1454,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 65,
"path": "/applications/expensive_seq/expensive_seq.py",
"repo_name": "MorbidMiyako/Hash-tables-I",
"src_encoding": "UTF-8",
"text": "x_cache = {\n\n}\n\ny_cache = {\n\n}\n\nz_cache = {\n\n}\n\nxyz_cache = {\n\n}\n\n\ndef expensive_seq(x, y, z):\n if f\"{x},{y},{z}\" in xyz_cache:\n return xyz_cache[f\"{x},{y},{z}\"]\n else:\n return_value = 0\n if x <= 0:\n return_value = y+z\n xyz_cache[f\"{x},{y},{z}\"] = return_value\n return return_value\n else:\n x_return = []\n y_return = []\n z_return = []\n\n if x in x_cache:\n x_return = x_cache[x]\n else:\n x_return = [x-1, x-2, x-3]\n x_cache[x] = x_return\n\n if y in y_cache:\n y_return = y_cache[y]\n else:\n y_return = [y+1, y+2, y+3]\n y_cache[y] = y_return\n\n if z in z_cache:\n z_return = z_cache[z]\n else:\n z_return = [z, z*2, z*3]\n z_cache[z] = z_return\n\n return_value = expensive_seq(x_return[0], y_return[0], z_return[0]) + expensive_seq(\n x_return[1], y_return[1], z_return[1]) + expensive_seq(x_return[2], y_return[2], z_return[2])\n\n xyz_cache[f\"{x},{y},{z}\"] = return_value\n\n return return_value\n\n\nif __name__ == \"__main__\":\n for i in range(10):\n x = expensive_seq(i*2, i*3, i*4)\n if x is None:\n print(\"ehm..\")\n print(f\"{i*2} {i*3} {i*4} = {x}\")\n\n print(expensive_seq(150, 400, 800))\n"
},
{
"alpha_fraction": 0.5446466207504272,
"alphanum_fraction": 0.553533673286438,
"avg_line_length": 25.550561904907227,
"blob_id": "115aff58b03b58927832091639d8e8237a034b3e",
"content_id": "24c00f74aa3d89d816652839a13e95a051774e2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2363,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 89,
"path": "/applications/markov/markov.py",
"repo_name": "MorbidMiyako/Hash-tables-I",
"src_encoding": "UTF-8",
"text": "import random\n\n# Read in all the words in one go\nwith open(\"input.txt\") as f:\n text = f.read()\n\n# TODO: analyze which words can follow other words\n\"\"\"\ndict = {\n word : [follows]\n}\n\ngo trough array of words: (for i in range(wordArray -1))\n for each:\n dict[wordArray[i]].append[wordArray[i+1]]\n\"\"\"\n\nnewText = \"\"\ntoRemove = [\"\\n\", \"\\r\"]\n\nfor letter in text:\n if letter not in toRemove:\n newText += letter\n\nwordsArray = newText.split(\" \")\n\nendLetters = [\".\", \"!\", \"?\"]\n\nwordDict = {}\nstartWordArray = []\nendWordArray = []\n\n\ndef randomSentence():\n\n for i in range(len(wordsArray)-1):\n # if i < 60:\n # print(wordsArray[i])\n # if wordsArray[i][len(wordsArray[i])-1] == \"\\n\" or wordsArray[i][len(wordsArray[i])-1] == \"\\r\":\n # print(\"_____________\")\n # print(\"_____________\")\n # print(\"_____________\")\n # print(\"_____________\")\n # wordsArray[i] = wordsArray[:-1]\n # print(wordsArray[i])\n # if wordsArray[i][len(wordsArray[i])-1] == \"n\":\n # if wordsArray[i][len(wordsArray[i])-1] == \"r\":\n # if wordsArray[i][len(wordsArray[i])-1] == \"\\\"\":\n\n if wordsArray[i][len(wordsArray[i])-1] in endLetters:\n endWordArray.append(wordsArray[i])\n\n if wordsArray[i][len(wordsArray[i])-1] == \"\\\"\":\n if wordsArray[i][len(wordsArray[i])-2] in endLetters:\n endWordArray.append(wordsArray[i])\n\n if wordsArray[i][0].isupper():\n startWordArray.append(wordsArray[i])\n\n if wordsArray[i][0] == \"\\\"\":\n if wordsArray[i][1].isupper():\n startWordArray.append(wordsArray[i])\n\n if wordsArray[i] in wordDict:\n wordDict[wordsArray[i]].append(wordsArray[i+1])\n else:\n wordDict[wordsArray[i]] = [wordsArray[i+1]]\n\n lastWord = random.choice(startWordArray)\n sentenceArray = [lastWord]\n while lastWord not in endWordArray:\n # print(lastWord)\n # print(lastWord in endWordArray)\n lastWord = random.choice(wordDict[lastWord])\n sentenceArray.append(lastWord)\n\n print(\"\\n\")\n print(\" 
\".join(sentenceArray))\n\n# print(wordsArray)\n# print(startWordArray)\n# print(endWordArray)\n\n\n# TODO: construct 5 random sentences\n# Your code here\n\nfor i in range(5):\n randomSentence()\n"
},
{
"alpha_fraction": 0.5559502840042114,
"alphanum_fraction": 0.5621669888496399,
"avg_line_length": 21.520000457763672,
"blob_id": "cbeae8b094774c1ad04c8349f90a192d4eca0fb4",
"content_id": "1101cbc9dfc2d12287779824435b032ca9d00193",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1131,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 50,
"path": "/applications/crack_caesar/crack_caesar.py",
"repo_name": "MorbidMiyako/Hash-tables-I",
"src_encoding": "UTF-8",
"text": "# Use frequency analysis to find the key to ciphertext.txt, and then\n# decode it.\n\n# Your code here\n\nf = open('ciphertext.txt', 'r')\n\ncompleteCipher = f.read()\n\nlettersDict = {}\n\nignoredLetters = [\" \", \",\", \".\", \"\\'\", \"\\n\", \"\\\"\", ';',\n ':', '-', '?', '!', 'â', '€', '”', '(', '1', ')']\n\ntotalLetters = 0\n\nvalueLetterPairs = []\n\nfor letter in completeCipher:\n if letter in ignoredLetters:\n pass\n elif letter in lettersDict:\n lettersDict[letter] += 1\n else:\n lettersDict[letter] = 1\n\nlettersList = list(lettersDict.items())\n\nlettersList.sort(key=lambda e: e[1], reverse=True)\n\nnewLettersDict = dict(lettersList)\n\nfrequencyOrder = ['E', 'T', 'A', 'O', 'H', 'N', 'R', 'I', 'S', 'D', 'L',\n 'W', 'U', 'G', 'F', 'B', 'M', 'Y', 'C', 'P', 'K', 'V', 'Q', 'J', 'X', 'Z']\n\ncounter = 0\n\nfor letter in newLettersDict:\n newLettersDict[letter] = counter\n counter += 1\n\ndecodedText = \"\"\n\nfor letter in completeCipher:\n if letter in newLettersDict:\n decodedText += frequencyOrder[newLettersDict[letter]]\n else:\n decodedText += letter\n\nprint(decodedText)\n"
},
{
"alpha_fraction": 0.5441558361053467,
"alphanum_fraction": 0.5467532277107239,
"avg_line_length": 25.55172348022461,
"blob_id": "903d98c265469115e3bde19ad0b0760722243686",
"content_id": "e9b497f9eafd3d8afc272ae7b74557dbbb346dad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 770,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 29,
"path": "/applications/no_dups/no_dups.py",
"repo_name": "MorbidMiyako/Hash-tables-I",
"src_encoding": "UTF-8",
"text": "def no_dups(s):\n # Your code here\n # set automatically orders ;-;\n \"\"\"\n if len(s.split(\" \")) <= 1:\n print(s)\n return s\n print(str(\" \".join(list(set(s.split(\" \"))))))\n return str(\" \".join(list(set(s.split(\" \")))))\n \"\"\"\n wordsArray = s.split(\" \")\n returnArray = []\n\n if len(wordsArray) <= 1:\n print(s)\n return s\n for word in wordsArray:\n if word not in returnArray:\n returnArray.append(word)\n print(\" \".join(returnArray))\n return \" \".join(returnArray)\n\n\nif __name__ == \"__main__\":\n print(no_dups(\"\"))\n print(no_dups(\"hello\"))\n print(no_dups(\"hello hello\"))\n print(no_dups(\"cats dogs fish cats dogs\"))\n print(no_dups(\"spam spam spam eggs spam sausage spam spam and spam\"))\n"
}
] | 7 |
shravankoninti/git_test | https://github.com/shravankoninti/git_test | ce5bc31a3b75cdeeaa993eb73f5b9c4219680a6c | b2429c64fabd9b915e3879d7a5d900ba92f928e3 | 9126e06b5a3f92895bf1305425c22ac878acb414 | refs/heads/master | 2022-07-03T02:28:34.009429 | 2020-05-13T18:25:31 | 2020-05-13T18:25:31 | 263,700,729 | 0 | 0 | null | 2020-05-13T17:38:19 | 2020-05-13T18:25:35 | 2020-05-13T18:25:32 | Python | [
{
"alpha_fraction": 0.625,
"alphanum_fraction": 0.671875,
"avg_line_length": 19.66666603088379,
"blob_id": "65a60d40d8f21c41c0de502b54e92a06eed5986e",
"content_id": "d2fe6acd233483709243c23e4b824a73bf0724b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 64,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 3,
"path": "/test.py",
"repo_name": "shravankoninti/git_test",
"src_encoding": "UTF-8",
"text": "import pandas as pd\n\ndf = pd.read_csv(\"train.csv\", nrows=100)\n\n\n"
}
] | 1 |
GraysonScherm/Distributed-Internet-Service-Delivery | https://github.com/GraysonScherm/Distributed-Internet-Service-Delivery | 3c90b1188e7636467609efc8a6545894cda57588 | 95cec9ea0b038877436b49845e597e2cb9904a33 | 30f9fc6573ed3b29b9dad185bd1b0776c22399a5 | refs/heads/master | 2020-04-10T19:08:35.079873 | 2016-12-03T23:18:44 | 2016-12-03T23:18:44 | 68,052,876 | 0 | 2 | null | 2016-09-12T22:21:04 | 2016-09-23T23:21:26 | 2016-10-02T21:12:45 | Python | [
{
"alpha_fraction": 0.6183986663818359,
"alphanum_fraction": 0.6348665356636047,
"avg_line_length": 33.411766052246094,
"blob_id": "4665b1ffbe31b52c4d59f9f21cd61c1cc57e5580",
"content_id": "11a127c44a00f3afe51023eae60b4f1e6bd8c25a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1761,
"license_type": "permissive",
"max_line_length": 121,
"num_lines": 51,
"path": "/PropFair.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "import os\nimport re\nimport sqlite3\nimport GEController \n\ndef Propfair(GEvector,tVector):\n #green energy vector, Grid Energy vector , T is the previous scheduled memory\n tc=50\n NDC=len(GEvector)\n Metric=[0]*NDC # Vector of the metric we used for scheduling\n for i in range(0,NDC):\n Metric[i]=GEvector[i]/tVector[i]\n \n MAX=Metric.index(max(Metric)) #the index of the choosen one \n SClist=[0]*len(GEvector) #refresh the Schedule list\n SClist[MAX]=1 #The Data Center which is selected\n #for i in range(0,NDC):\n # lambdaList[i]=lambdaList[i]+SClist[i]\n for i in range(0,NDC):\n if SClist[i]==1:\n tVector[i]=(1.0-(1.0/tc))*tVector[i]+((1.0/tc))*GEvector[i]\n else:\n tVector[i]=(1.0-(1.0/tc))*tVector[i]\n \n print(\"---------***T VALUE*****---------\")\n print(tVector)\n #return SClist#, lambdaList\n return MAX, tVector\n\t\ndef fetchServerInfo():\n # serverInfo = re.split(';',line)\n # servers.append(serverInfo)\n fd = os.open(\"/soft/ryu/Distributed-Internet-Service-Delivery/controller.db\", os.O_RDONLY)\n conn = sqlite3.connect('/dev/fd/%d' % fd)\n os.close(fd)\n cursor = conn.cursor() \n currentEnergyValues = [0]*GEController.numberOfServers\n currentNumberOfUsers = [0]*GEController.numberOfServers\n\t\n for i in range(0, GEController.numberOfServers):\n\tcursor.execute(\"SELECT * from energyValues where id = (SELECT MAX(id) from energyValues where server = ?)\", str(i + 1))\n\tfetchedData = cursor.fetchall()\n if (fetchedData):\n\t currentEnergyValues[i] = fetchedData[0][1]\n else:\n\t currentEnergyValues[i] = 0\n\t currentNumberOfUsers[i] = 0\n \n\t\n\t\n return currentEnergyValues, currentNumberOfUsers\n\n\t\t\n\t\n"
},
{
"alpha_fraction": 0.6708727478981018,
"alphanum_fraction": 0.6961093544960022,
"avg_line_length": 30.683332443237305,
"blob_id": "7531afc89d034bb5b9ca40a41df19d0e178e17e8",
"content_id": "25b6013ef4c004d0d421910494189454e901cd8b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1902,
"license_type": "permissive",
"max_line_length": 126,
"num_lines": 60,
"path": "/controller.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "import socket\nimport sys\nimport sqlite3\nimport re\nfrom datetime import datetime\n\ndef insertInto (cur, recieved, source, id): #inserts data into the table\n energyValue, serverID, date, private_ip, numberOfActiveUsers = recieved\n value = float(energyValue)\n sID = int(serverID)\n\n print (id, value, source[0], sID, date, private_ip, int(numberOfActiveUsers))\n cur.execute (\"INSERT into energyValues(id, value, ip, server, time, private_ip, users_number) values (?, ?, ?, ?, ?, ?, ?)\", \n (id, value, source[0], sID, date, private_ip, int(numberOfActiveUsers)))\n #date_object = datetime.strptime (date, '%Y-%m-%d %H:%M:%S')\n \ndef listenTCP(TCP_IP, TCP_PORT, connLimit): #opens listening TCP ports\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.bind((TCP_IP, TCP_PORT))\n s.listen(connLimit)\n return s\n\ndef openDB(dbname): #opens database\n db = sqlite3.connect(dbname)\n cursor = db.cursor()\n return db, cursor\n\ndef runTCP(s, db, cursor, addressList, id): #accepts TCP connections from the addressList and calls insertion function\n BUFFER_SIZE = 1024\n while 1:\n conn, addr = s.accept()\n try:\n print 'Connection address:', addr\n if addr[0] not in addressList:\n break\n data = conn.recv(BUFFER_SIZE)\n if not data: break\n recieved = re.split(';', data) #split data into an array\n id += 1\n insertInto(cursor, recieved, addr, id)\n db.commit()\n conn.send(data) # echo\n except KeyboardInterrupt: #not sure if it works properly\n print \"Closing connection\"\n conn.close()\n db_conn.close()\n\ns = listenTCP('72.36.65.116', 5005, 3)\ndb, cursor = openDB('controller.db')\ncursor.execute(\"SELECT MAX(id) from energyValues\")\nid = cursor.fetchone()[0] #take the last id in the table\nif id == None:\n id = 0\nprint(id)\n\naddressList = ('172.17.4.5', '172.17.4.6', '172.17.4.7') #we will accept TCP only from these IP addresses\nrunTCP(s, db, cursor, addressList, id)\n\nconn.close()\ndb.close()\t\n"
},
{
"alpha_fraction": 0.6302083134651184,
"alphanum_fraction": 0.6468523740768433,
"avg_line_length": 40.4600944519043,
"blob_id": "ffa176712575d3b2bb3f4e789420f25aa9842e31",
"content_id": "bcab4a6a5c7111e8480205f2decae1e1c966b131",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8832,
"license_type": "permissive",
"max_line_length": 212,
"num_lines": 213,
"path": "/GEController.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "# Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nAn OpenFlow 1.0 L2 learning switch implementation.\n\"\"\"\n\nimport logging\nimport struct\nimport re\nimport sqlite3\nimport os\nimport sys\nfrom ryu.base import app_manager\nfrom ryu.controller import mac_to_port\nfrom ryu.controller import ofp_event\nfrom ryu.controller.handler import MAIN_DISPATCHER\nfrom ryu.controller.handler import CONFIG_DISPATCHER\nfrom ryu.controller.handler import set_ev_cls\nfrom ryu.ofproto import ofproto_v1_0\nfrom ryu.lib.mac import haddr_to_bin\nfrom ryu.lib.packet import packet\nfrom ryu.lib.packet import ethernet\nfrom ryu.lib.packet import ipv4\nfrom ryu.lib.packet import tcp\nfrom ryu.controller import dpset\nfrom netaddr import *\nfrom utils import *\nfrom ryu.lib.mac import haddr_to_bin\nfrom PropFair import *\n\n'''\nThis file is edited from Ryu example which is located at ryu/ryu/app/simple_switch.py.\nAccording to its licecse(please don't trust my reading and read it), we can modify and use it as long as we keep the old license and state we've change the code. 
--Joe\n'''\n\nFLOW_HARD_TIMEOUT = 30\nFLOW_IDLE_TIMEOUT = 10\n\nnumberOfServers = 0\n\n\nclass SimpleSwitch(app_manager.RyuApp):\n global numberOfServers\n __location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))\n OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION]\n servers = [0,]\n with open(os.path.join(__location__, 'servers.conf')) as f:\n for line in f:\n serverInfo = re.split(';',line)\n servers.append(serverInfo)\n numberOfServers += 1 \n serverLoad = [0]*numberOfServers\n T = [1.0]*numberOfServers #previous scheduled memory\n mac_to_port = {}\n\n def __init__(self, *args, **kwargs):\n super(SimpleSwitch, self).__init__(*args, **kwargs)\n self.mac_to_port = {}\n\n \n def add_flow(self, datapath, match, act, priority=0, idle_timeout=0, flags=0, cookie=0):\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n mod = parser.OFPFlowMod(datapath=datapath, priority=priority, match=match, actions=act, flags=flags, idle_timeout=idle_timeout, cookie=cookie)\n datapath.send_msg(mod)\n\n def forward_packet(self, msg, port_list):\n\n datapath = msg.datapath\n ofproto = datapath.ofproto\n\n actions = []\n \n for p in port_list:\n actions.append( createOFAction(datapath, ofproto.OFPAT_OUTPUT, p) )\n\n # install a flow to avoid packet_in next time\n if ofproto.OFPP_FLOOD not in port_list:\n match = getFullMatch( msg )\n sendFlowMod(msg, match, actions, FLOW_HARD_TIMEOUT, FLOW_IDLE_TIMEOUT, msg.buffer_id)\n else :\n\n sendPacketOut(msg=msg, actions=actions, buffer_id=msg.buffer_id)\n\n @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n def _packet_in_handler(self, ev): \n dl_type_ipv4 = 0x0800\n msg = ev.msg\n datapath = msg.datapath\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocol(ethernet.ethernet)\n dst = eth.dst\n src = eth.src\n\tipv4_pkt = pkt.get_protocol(ipv4.ipv4)\n dpid = datapath.id\n\n\ttcp_sgm = pkt.get_protocol(tcp.tcp)\t\n\n\tif tcp_sgm:\n 
self.logger.info(\"packet in %s %s %s %s; TCP ports: source=%s and dest=%s\", dpid, ipv4_pkt.src, ipv4_pkt.dst, msg.in_port, tcp_sgm.src_port, tcp_sgm.dst_port)\n match = parser.OFPMatch (dl_type = dl_type_ipv4, nw_src=self.ipv4_to_int(ipv4_pkt.src), tp_src=tcp_sgm.src_port, nw_proto = 6)\n # self.logger.info(\"T: Server1 - %d, Server2 - %d, Server3 - %d \", self.T[0], self.T[1], self.T[2]) \n GEvector, lambdaList = fetchServerInfo()\n#\t MAX, self.T = Propfair(GEvector,0,lambdaList, self.T)\n\t self.logger.info(\"Calling Propfair\")\n print \"GEVector status:\"\n for i in range (0, numberOfServers):\n\t sys.stdout.write(\"Server\" + str(i+1) + \" = \" + str(GEvector[i]) + \" || \")\n print \" \"\n\t MAX, self.T = Propfair(GEvector,self.T)\n \t serverID = MAX+1 #scheduler()\n \n actions = [parser.OFPActionSetNwDst(self.ipv4_to_int(self.servers[serverID][1])), \n parser.OFPActionSetDlDst(haddr_to_bin(self.servers[serverID][2])), parser.OFPActionOutput(int(self.servers[serverID][0]))]\n self.serverLoad[serverID-1]+=1\n\t print \"Server\", serverID, \"is chosen for the client with IP/Port\", ipv4_pkt.src, tcp_sgm.src_port\n\t self.add_flow(datapath, match, actions, 1, 60, ofproto.OFPFF_SEND_FLOW_REM, serverID)\n \n #rewriting response header\n match = parser.OFPMatch (dl_type = dl_type_ipv4, nw_src=self.ipv4_to_int(self.servers[serverID][1]), \n nw_dst=self.ipv4_to_int(ipv4_pkt.src), tp_dst=tcp_sgm.src_port)\n actions = [ parser.OFPActionSetNwSrc (self.ipv4_to_int(ipv4_pkt.dst)), #REWRITE IP HEADER FOR TCP CONNECTION ESTABLISHMENT. 
rewriting eth is not needed parser.OFPActionSetDlSrc(haddr_to_bin(eth.dst)), \n parser.OFPActionOutput(ofproto.OFPP_NORMAL)]\n self.add_flow(datapath, match, actions, 3, 60)\n\n# self.logger.info(\"Current number of users: Server1 - %d, Server2 - %d, Server3 - %d\", lambdaList[0], lambdaList[1], lambdaList[2])\n self.logger.info(\"Flow installed\")\n print \"Current number of users on each server:\"\n for i in range(0, numberOfServers):\n sys.stdout.write(\"Server\" + str(i+1) + \" = \" + str(self.serverLoad[i]) + \" || \")\n print \" \"\n actions = []\n actions.append( createOFAction(datapath, ofproto.OFPAT_OUTPUT, int(self.servers[serverID][0])) ) \n sendPacketOut(msg=msg, actions=actions, buffer_id=msg.buffer_id)\n\t\n @set_ev_cls(ofp_event.EventOFPFlowRemoved, MAIN_DISPATCHER)\n def flow_removal_handler(self, ev):\n msg = ev.msg\n match = msg.match\n\treason = msg.reason\n self.logger.info(\"Client released serverID = %d\", msg.cookie)\n serverId = msg.cookie - 1\n\tif self.serverLoad[serverId] > 0:\n\t self.serverLoad[serverId]-=1\n\n\n\n def remove_table_flows(self, datapath, table_id, match, instructions):\n \"\"\"Create OFP flow mod message to remove flows from table.\"\"\"\n ofproto = datapath.ofproto\n flow_mod = datapath.ofproto_parser.OFPFlowMod(datapath=datapath, match=match, command=ofproto.OFPFC_DELETE, \n cookie=0, idle_timeout=0,out_port=65535, buffer_id=4294967295, flags=0, hard_timeout=0,priority=0, actions=[])\n return flow_mod\n\n @set_ev_cls(dpset.EventDP, dpset.DPSET_EV_DISPATCHER)\n def _event_switch_enter_handler(self, ev):\n dl_type_ipv4 = 0x0800\n dl_type_arp = 0x0806\n dp = ev.dp\n ofproto = dp.ofproto\n parser = dp.ofproto_parser\n\n self.logger.info(\"Switch connected %s. 
Delete previous flows...\", dp)\n \n empty_match = parser.OFPMatch()\n instructions = []\n flow_mod = self.remove_table_flows(dp, 0,empty_match, instructions)\n dp.send_msg(flow_mod)\n\n self.logger.info(\"Install the default flows...\")\n\n # addressList = []\n # for server in self.servers:\n # addressList.append(server[1]) # process packets from servers normally\n # hwAddressList = ('02:71:2a:55:7f:98') #filter client packets\n actions = [parser.OFPActionOutput(ofproto.OFPP_NORMAL)]\n for i in range(1, len(self.servers)):\n match = parser.OFPMatch(dl_type = dl_type_ipv4, nw_src = self.servers[i][1])\n self.add_flow(dp, match, actions, 2, 0) \n# self.logger.info(\"Added l2 flow for address %s\", address)\n \n match = parser.OFPMatch(dl_type = dl_type_arp)#process arp packets normally\n self.add_flow(dp, match, actions, 100, 0)\n\n match = parser.OFPMatch ()\n actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER)]\n self.add_flow(dp, match, actions, 1, 0) #add miss flow\n\n self.logger.info(\"Added default rules for servers and miss-flow. Ready to work!\")\n\n def ipv4_to_int(self, string):\n \tip = string.split('.')\n \tassert len(ip) == 4\n \ti = 0\n \tfor b in ip:\n \t\tb = int(b)\n \ti = (i << 8) | b\n return i\n\n"
},
{
"alpha_fraction": 0.5255292654037476,
"alphanum_fraction": 0.5641344785690308,
"avg_line_length": 23.09375,
"blob_id": "5e13706c466f2b495f202451b646619b57fbce91",
"content_id": "8e0602465587ed43c2173f1293be0b176da5d1aa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 803,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 32,
"path": "/copy/PropFairtest.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "def Propfair(GEvector,Evector,T):\r\n tc = float(5)\r\n Dvector=[GEvector/T for GEvector,T in zip(GEvector,T)] #metric vector for decision making\r\n T=list([1,1,1])\r\n MAX=Dvector.index(max(Dvector))\r\n SClist=[0,0,0]#*len(GEvector) #refresh the Schedule list\r\n SClist[MAX]=1 #The Data Center which is selected\r\n print(SClist)\r\n for i in range(0,3): #len(GEvector)):\r\n\tif SClist[i]==1:\r\n \t h=float(T[i])\r\n # T[i]=((1-(1/tc))*T[i])+(((1/tc))*GEvector[i])\r\n\t h=float((1-(1/tc)))*float(h)\t\r\n else:\r\n T[i]=(1-(1/tc))*T[i]\r\n\r\n \r\n print(T)\r\n print(MAX)\r\n print(SClist)\r\n return SClist\r\n\r\nT=list([1,1,1])\r\n\r\nEvector=[1]*3\r\nSClist=[0]*3\r\n\r\n#print len(GEvector)\r\nGEvector=[2,1,1]\r\n\r\nfor i in range(0,10):\r\n SClist=Propfair(GEvector,Evector,T)\r\n"
},
{
"alpha_fraction": 0.6355511546134949,
"alphanum_fraction": 0.6693148016929626,
"avg_line_length": 28.617647171020508,
"blob_id": "30471462e0d5b4679f0d93411973fb553538eb05",
"content_id": "df5de2696ada5efb3cb37dae63b5d39cabad8903",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1007,
"license_type": "permissive",
"max_line_length": 165,
"num_lines": 34,
"path": "/server.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "import commands\nimport socket, sys\nimport datetime\nimport time, random\nimport subprocess\n\nTCP_IP = '192.1.242.154'\nTCP_PORT = 5005\nBUFFER_SIZE = 1024 #buffer size\n\nbs = 10\ninterval = sys.argv[2]\nserverID = sys.argv[1]\n\nif len(sys.argv) < 3:\n print (\"Enter the server id and time interval\")\n sys.exit(1)\n\nintf = 'eth1'\nintf_ip = commands.getoutput(\"ip address show dev \" + intf).split()\nintf_ip = intf_ip[intf_ip.index('inet') + 1].split('/')[0]\nprint intf_ip\n\nwhile True:\n gE = random.uniform(1,10)*bs\n ts = time.time()\n numberOfActiveUsers = subprocess.check_output(\"sudo netstat -anp | grep :80 | grep ESTABLISHED | wc -l 2>&1\", shell=True, stderr=subprocess.PIPE).split('\\n', 1)[0]\n MESSAGE = str(gE) + \";\" + serverID + \";\" + datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S') + \";\" + intf_ip + \";\" + numberOfActiveUsers\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((TCP_IP, TCP_PORT))\n s.send(MESSAGE)\n s.close()\n print (MESSAGE)\n time.sleep(int(interval))\n"
},
{
"alpha_fraction": 0.5735930800437927,
"alphanum_fraction": 0.6363636255264282,
"avg_line_length": 21,
"blob_id": "152f347437c66e220510526ffcea6315e12edb94",
"content_id": "5b14d981774b86a4182e50bb91525b72ba9eb84d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 462,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 21,
"path": "/client_test.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "import commands\nimport socket, sys\nimport datetime\nimport time, random\n\nTCP_IP = 'server-1'\nTCP_PORT = 80\n\nBUFFER_SIZE = 1024\n\n\nwhile 1:\n start_time = time.time()\n for i in range (0,10):\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.bind(('0.0.0.0', 9080 + i))\n s.connect((TCP_IP, TCP_PORT))\n s.send('GET /file.bz2 /HTTP 1.1 \\r\\n\\r\\n')\n data = (s.recv(1000000))\n print \"File received! Time: \" + str(time.time() - start_time)\n s.close()\n"
},
{
"alpha_fraction": 0.5537037253379822,
"alphanum_fraction": 0.5824074149131775,
"avg_line_length": 28,
"blob_id": "a0ea94a1359438feb70071edf3865497b78335bb",
"content_id": "9e4be67f0244121746c6f7eab30ecfa7592022f1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1080,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 36,
"path": "/PropFairtest.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "def Propfair(GEvector,Evector,lambdaList,T):\r\n #green energy vector, Grid Energy vector , T is the previous scheduled memory\r\n tc=50\r\n NDC=len(GEvector)\r\n Metric=[0]*NDC # Vector of the metric we used for scheduling\r\n for i in range(0,NDC):\r\n Metric[i]=GEvector[i]/T[i]\r\n \r\n MAX=Metric.index(max(Metric)) #the index of the choosen one\r\n SClist=[0]*len(GEvector) #refresh the Schedule list\r\n SClist[MAX]=1 #The Data Center which is selected\r\n for i in range(0,NDC):\r\n lambdaList[i]=lambdaList[i]+SClist[i]\r\n for i in range(0,NDC):\r\n if SClist[i]==1:\r\n T[i]=(1-(1/tc))*T[i]+((1/tc))*GEvector[i]\r\n else:\r\n T[i]=(1-(1/tc))*T[i]\r\n \r\n print(T)\r\n print(Metric)\r\n print(MAX)\r\n print(SClist)\r\n print(lambdaList)\r\n print('----------------')\r\n return SClist, lambdaList, Metric, T\r\n\r\nT=[1]*3\r\nGEvector=[82,95,54]\r\nEvector=[1]*3\r\nSClist=[0]*3\r\nlambdaList=[0]*3\r\nfor i in range(0,10):\r\n SClist,lambdaList, Metric, T=Propfair(GEvector,Evector,lambdaList,T)\r\n\r\nx=input()\r\n"
},
{
"alpha_fraction": 0.6189513802528381,
"alphanum_fraction": 0.6218047738075256,
"avg_line_length": 26.213592529296875,
"blob_id": "47adfac95553058d00416f2378eee4efbdbbf287",
"content_id": "7ce8c937bfb7dc5c636ce67fafa59692ee4d01aa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8411,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 309,
"path": "/utils.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "import ConfigParser\nimport os\nimport sys\nfrom ryu.ofproto import ether\nfrom ryu.lib.packet import packet\nfrom ryu.lib.packet import ethernet\nfrom ryu.lib.packet import ipv4\nfrom ryu.lib.packet import tcp\nfrom ryu.lib.packet import udp\nfrom ryu.lib.packet import icmp\nfrom ryu.lib.packet import arp\nfrom ryu.lib.packet import vlan\nfrom ryu.lib import mac\n\ndef readConfigFile(filename) :\n config = None\n filename = os.path.expanduser(filename)\n if not os.path.exists(filename):\n \tsys.exit(-1)\n\n confparser = ConfigParser.RawConfigParser()\n try:\n \tconfparser.read(filename)\n except ConfigParser.Error as exc:\n print(\"Config file %s could not be parsed: %s\" % (filename, str(exc)))\n # Create a dictionary from the configuration\n # - each section is a key in the dictionary that it's value\n # is a dictionary with (key, value) pairs of configuration\n # parameters\n config = {}\n for sec in confparser.sections():\n \tconfig[sec] = {}\n for (key,val) in confparser.items(sec):\n \tconfig[sec][key] = val\n\n return config\n\ndef packetIsIP(message) :\n pkt = packet.Packet(message.data)\n\n ip = pkt.get_protocol(ipv4.ipv4)\n if ip is not None :\n return True\n return False\n\ndef packetIsARP(message) :\n pkt = packet.Packet(message.data)\n\n a = pkt.get_protocol(arp.arp)\n if a is not None :\n return True\n return False\n\ndef packetIsRequestARP(message) :\n pkt = packet.Packet(message.data)\n\n a = pkt.get_protocol(arp.arp)\n if a.opcode == arp.ARP_REQUEST :\n return True\n return False\n\ndef packetIsReplyARP(message) :\n pkt = packet.Packet(message.data)\n\n a = pkt.get_protocol(arp.arp)\n if a.opcode == arp.ARP_REPLY :\n\treturn True\n return False\n\ndef packetIsTCP(message) :\n pkt = packet.Packet(message.data)\n\n ip = pkt.get_protocol(ipv4.ipv4)\n if ip is not None and ip.proto == 6 :\n\treturn True\n return False\n\ndef packetDstIp(message, ipaddr) :\n if packetIsIP(message):\n\tpkt = packet.Packet(message.data)\n\tip = 
pkt.get_protocol(ipv4.ipv4)\n \tif not cmp(ip.dst, ipaddr):\n\t\treturn True\n return False\n\ndef packetSrcIp(message, ipaddr) :\n if packetIsIP(message):\n pkt = packet.Packet(message.data)\n ip = pkt.get_protocol(ipv4.ipv4)\n if not cmp(ip.src, ipaddr):\n return True\n return False\n\ndef packetDstTCPPort(message, tcpport) :\n if packetIsTCP(message) :\n\tpkt = packet.Packet(message.data)\n dsttcp = pkt.get_protocol(tcp.tcp)\n\tif dsttcp.dst_port == tcpport :\n\t\treturn True\n return False\n\ndef packetSrcTCPPort(message, tcpport) :\n if packetIsTCP(message) :\n pkt = packet.Packet(message.data)\n srctcp = pkt.get_protocol(tcp.tcp)\n\tif srctcp.src_port == tcpport :\n return True\n return False\n\ndef packetArpDstIp(message, ipaddr) :\n if packetIsARP(message):\n pkt = packet.Packet(message.data)\n a = pkt.get_protocol(arp.arp)\n if not cmp(a.dst_ip, ipaddr):\n return True\n return False\n\ndef packetArpSrcIp(message, ipaddr) :\n if packetIsARP(message):\n pkt = packet.Packet(message.data)\n a = pkt.get_protocol(arp.arp)\n if not cmp(a.src_ip, ipaddr):\n return True\n return False\n\ndef createArpRequest(message, ip):\n if not packetIsARP(message):\n \tprint(\"Packet is not ARP\")\n \treturn\n pkt = packet.Packet(message.data)\n origarp = pkt.get_protocol(arp.arp)\n a = arp.arp(\n \thwtype=origarp.hwtype,\n \tproto=origarp.proto,\n\tsrc_mac=origarp.src_mac,\n \tdst_mac=origarp.dst_mac,\n\thlen=origarp.hlen,\n \topcode=arp.ARP_REQUEST,\n \tplen=origarp.plen,\n\tsrc_ip=origarp.src_ip,\n\tdst_ip=ip\n\t)\n e = ethernet.ethernet(\n\tdst=mac.BROADCAST_STR,\n\tsrc=origarp.src_mac,\n\tethertype=ether.ETH_TYPE_ARP) \n p = packet.Packet()\n p.add_protocol(e)\n p.add_protocol(a)\n p.serialize()\n return p\n\ndef createArpReply(message, ip):\n if not packetIsARP(message):\n print(\"Packet is not ARP\")\n return\n pkt = packet.Packet(message.data)\n origarp = pkt.get_protocol(arp.arp)\n a = arp.arp(\n hwtype=origarp.hwtype,\n proto=origarp.proto,\n 
src_mac=origarp.src_mac,\n dst_mac=origarp.dst_mac,\n hlen=origarp.hlen,\n opcode=arp.ARP_REPLY,\n plen=origarp.plen,\n src_ip=ip,\n dst_ip=origarp.dst_ip\n )\n e = ethernet.ethernet(\n dst=origarp.dst_mac,\n src=origarp.src_mac,\n ethertype=ether.ETH_TYPE_ARP)\n p = packet.Packet()\n p.add_protocol(e)\n p.add_protocol(a)\n p.serialize()\n return p\n\ndef ipv4_to_int(string):\n\tip = string.split('.')\n \tassert len(ip) == 4\n \ti = 0\n \tfor b in ip:\n \t\tb = int(b)\n \ti = (i << 8) | b\n return i\n\ndef sendPacketOut( msg, actions, buffer_id=0xffffffff, data=None ):\n datapath = msg.datapath\n parser = datapath.ofproto_parser\n\n if buffer_id == 0xffffffff :\n out = parser.OFPPacketOut(\n datapath=datapath, buffer_id=buffer_id, in_port=msg.in_port,\n actions=actions, data=data)\n datapath.send_msg(out)\n else :\n out = parser.OFPPacketOut(\n datapath=datapath, buffer_id=buffer_id, in_port=msg.in_port,\n actions=actions)\n datapath.send_msg(out)\n\ndef getFullMatch( msg ):\n datapath = msg.datapath\n parser = datapath.ofproto_parser\n \n in_port=None\n dl_src=None\n dl_dst=None\n dl_vlan=None\n dl_vlan_pcp=None\n dl_type=None\n nw_tos=None\n nw_proto=None\n nw_src=None\n nw_dst=None\n tp_src=None\n tp_dst=None\n \n in_port = msg.in_port\n\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocol(ethernet.ethernet)\n\n dl_src = eth.src\n dl_dst = eth.dst\n dl_type = eth.ethertype\n\n vl = pkt.get_protocol(vlan.vlan)\n if vl is not None :\n dl_vlan = vl.vid\n dl_vlan_pcp = vl.pcp\n dl_type = vl.ethertype\n \n ip = pkt.get_protocol(ipv4.ipv4)\n if ip is not None :\n nw_src = ip.src\n nw_dst = ip.dst\n nw_proto = ip.proto\n nw_tos = ip.tos\n\n t = pkt.get_protocol(tcp.tcp)\n if t is not None :\n tp_src = t.src_port\n tp_dst = t.dst_port\n\n u = pkt.get_protocol(udp.udp) \n if u is not None :\n tp_src = u.src_port\n tp_dst = u.dst_port\n \n ic = pkt.get_protocol(icmp.icmp)\n if ic is not None :\n tp_src = ic.type\n tp_dst = ic.code\n \n a = pkt.get_protocol(arp.arp)\n 
if a is not None :\n nw_src = a.src_ip\n nw_dst = a.dst_ip\n nw_proto = a.opcode\n\n match = parser.OFPMatch( \n dl_src=mac.haddr_to_bin(dl_src), \n dl_dst=mac.haddr_to_bin(dl_dst), \n dl_vlan=dl_vlan, \n dl_vlan_pcp=dl_vlan_pcp, \n dl_type=dl_type, \n nw_tos=nw_tos, \n nw_proto=nw_proto, \n nw_src=ipv4_to_int(nw_src), \n nw_dst=ipv4_to_int(nw_dst), \n tp_src=tp_src, \n tp_dst=tp_dst,\n in_port=in_port )\n return match\n\ndef createOFAction(datapath, action_type, arg) :\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser \n\n if action_type == ofproto.OFPAT_OUTPUT : \n return parser.OFPActionOutput(arg)\n if action_type == ofproto.OFPAT_SET_DL_SRC : \n return parser.OFPActionSetDlSrc(mac.haddr_to_bin(arg))\n if action_type == ofproto.OFPAT_SET_DL_DST : \n return parser.OFPActionSetDlDst(mac.haddr_to_bin(arg))\n if action_type == ofproto.OFPAT_SET_NW_SRC : \n return parser.OFPActionSetNwSrc(ipv4_to_int(arg))\n if action_type == ofproto.OFPAT_SET_NW_DST : \n return parser.OFPActionSetNwDst(ipv4_to_int(arg))\n if action_type == ofproto.OFPAT_SET_TP_SRC : \n return parser.OFPActionSetTpSrc(arg)\n if action_type == ofproto.OFPAT_SET_TP_DST : \n return parser.OFPActionSetTpDst(arg)\n return None\n \ndef sendFlowMod(msg, match, actions, hard_timeout, idle_timeout, buffer_id=None):\n datapath = msg.datapath\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n\n mod = parser.OFPFlowMod(\n datapath=datapath, match=match, cookie=0,\n command=ofproto.OFPFC_ADD, idle_timeout=idle_timeout, hard_timeout=hard_timeout,\n priority=ofproto.OFP_DEFAULT_PRIORITY,\n flags=ofproto.OFPFF_SEND_FLOW_REM, actions=actions, buffer_id=buffer_id)\n datapath.send_msg(mod)\n\n\n"
},
{
"alpha_fraction": 0.6794582605361938,
"alphanum_fraction": 0.7291196584701538,
"avg_line_length": 27.600000381469727,
"blob_id": "a199bb9c9a09690010e59c81f6a47e087459954a",
"content_id": "e5b7ebd7fb4452f0f4d64c000be9565a1a27245d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 443,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 15,
"path": "/consumption.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "from __future__ import division\n\n\ndef load(lambdaList, lambdaNominal, serverID):\n\tserverPower = float(0)\n\tserverPowerIdle = float(63)\n\tserverPowerPeak = float(92)\n\t#will need to alter serverLoad input later\n\tserverLoad = float((lambdaList[serverID]) / lambdaNominal)\n\n\tserverPower = serverPowerIdle + ((serverPowerPeak - serverPowerIdle) * serverLoad)\n#\tprint (serverLoad)\n\treturn serverPower\n\n#load([1000, 2003, 1905], 5000, 1)\n\n \n \n\n\n\n"
},
{
"alpha_fraction": 0.5529412031173706,
"alphanum_fraction": 0.572549045085907,
"avg_line_length": 34.42856979370117,
"blob_id": "6d8e505027d5f56a5aa54214886973f194797746",
"content_id": "9bdedd38b3ce4e0b1f02eb567c2c46b64b8558ae",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 510,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 14,
"path": "/copy/PropFair.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "def Propfair(GEvector,Evector,T):\r\n tc=5;\r\n Dvector=[GEvector/T for GEvector,T in zip(GEvector,T)] #metric vector for decision making\r\n MAX=Dvector.index(max(Dvector))\r\n SClist=[0]*len(GEvector) #refresh the Schedule list\r\n SClist[MAX]=1 #The Data Center which is selected\r\n print(SClist)\r\n for i in range(0,len(GEvector)):\r\n if SClist[i]==1:\r\n T[i]=(1-(1/tc))*T[i]+((1/tc))*GEvector[i]\r\n else:\r\n T[i]=(1-(1/tc))*T[i]\r\n \r\n return SClist\r\n"
},
{
"alpha_fraction": 0.5147058963775635,
"alphanum_fraction": 0.5735294222831726,
"avg_line_length": 16,
"blob_id": "0d054138b6977231b58820bb698d1f13bc9d45e2",
"content_id": "d46dbd2617e6cbdc9b1858359424af25b4813907",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 68,
"license_type": "permissive",
"max_line_length": 38,
"num_lines": 4,
"path": "/wget_script.sh",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "for i in {0..50}\n do\n wget http://server-1/sample.txt &\n done\n"
},
{
"alpha_fraction": 0.6248500347137451,
"alphanum_fraction": 0.6520463824272156,
"avg_line_length": 39.32258224487305,
"blob_id": "8ff4a7bab0e6cf3ccaa8d1ce417fd813d225fe98",
"content_id": "f89601adbff39900b18ad43888717ed162049598",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7501,
"license_type": "permissive",
"max_line_length": 167,
"num_lines": 186,
"path": "/copy/simple_switch.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "# Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nAn OpenFlow 1.0 L2 learning switch implementation.\n\"\"\"\n\nimport logging\nimport struct\n\nimport sqlite3\nfrom ryu.base import app_manager\nfrom ryu.controller import mac_to_port\nfrom ryu.controller import ofp_event\nfrom ryu.controller.handler import MAIN_DISPATCHER\nfrom ryu.controller.handler import CONFIG_DISPATCHER\nfrom ryu.controller.handler import set_ev_cls\nfrom ryu.ofproto import ofproto_v1_0\nfrom ryu.lib.mac import haddr_to_bin\nfrom ryu.lib.packet import packet\nfrom ryu.lib.packet import ethernet\nfrom ryu.lib.packet import ipv4\nfrom ryu.controller import dpset\nfrom netaddr import *\nfrom utils import *\nfrom ryu.lib.mac import haddr_to_bin\n'''\nThis file is edited from Ryu example which is located at ryu/ryu/app/simple_switch.py.\nAccording to its licecse(please don't trust my reading and read it), we can modify and use it as long as we keep the old license and state we've change the code. 
--Joe\n'''\n\nFLOW_HARD_TIMEOUT = 30\nFLOW_IDLE_TIMEOUT = 10\n\nclass SimpleSwitch(app_manager.RyuApp):\n OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION]\n \n servers = [0, [1, '10.10.1.1', '02:71:2a:55:7f:98'], [3, '10.10.1.2', '02:b4:9c:c8:84:42'], [4, '10.10.1.3', '02:51:94:52:e2:a7']]\n serverLoad = [0, 0, 0, 0]\n\n\n def __init__(self, *args, **kwargs):\n super(SimpleSwitch, self).__init__(*args, **kwargs)\n self.mac_to_port = {}\n\n \n def add_flow(self, datapath, match, act, priority=0, idle_timeout=0, flags=0, cookie=0):\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n \n mod = parser.OFPFlowMod(datapath=datapath, priority=priority, match=match, actions=act, flags=flags, idle_timeout=idle_timeout, cookie=cookie)\n datapath.send_msg(mod)\n\n def forward_packet(self, msg, port_list):\n\n datapath = msg.datapath\n ofproto = datapath.ofproto\n\n actions = []\n \n for p in port_list:\n actions.append( createOFAction(datapath, ofproto.OFPAT_OUTPUT, p) )\n\n # install a flow to avoid packet_in next time\n if ofproto.OFPP_FLOOD not in port_list:\n match = getFullMatch( msg )\n sendFlowMod(msg, match, actions, FLOW_HARD_TIMEOUT, FLOW_IDLE_TIMEOUT, msg.buffer_id)\n else :\n\n sendPacketOut(msg=msg, actions=actions, buffer_id=msg.buffer_id)\n\n @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)\n def _packet_in_handler(self, ev):\n dl_type_ipv4 = 0x0800\n msg = ev.msg\n datapath = msg.datapath\n ofproto = datapath.ofproto\n parser = datapath.ofproto_parser\n pkt = packet.Packet(msg.data)\n eth = pkt.get_protocol(ethernet.ethernet)\n dst = eth.dst\n src = eth.src\n\tipv4_pkt = pkt.get_protocol(ipv4.ipv4)\n dpid = datapath.id\n\tif ipv4_pkt:\n self.logger.info(\"packet in %s %s %s %s\", dpid, ipv4_pkt.src, ipv4_pkt.dst, msg.in_port)\n match = parser.OFPMatch (dl_type = dl_type_ipv4, nw_src=self.ipv4_to_int(ipv4_pkt.src))\n serverID = 2 #scheduler()\n actions = [parser.OFPActionSetNwDst(self.ipv4_to_int(self.servers[serverID][1])), \n 
parser.OFPActionSetDlDst(haddr_to_bin(self.servers[serverID][2])), parser.OFPActionOutput(self.servers[serverID][0])]\n self.serverLoad[serverID]+=1\n self.add_flow(datapath, match, actions, 1, 10, ofproto.OFPFF_SEND_FLOW_REM, 2)\n self.logger.info(\"Flow installed for client %s and serverID %d\", ipv4_pkt.src, serverID)\n actions = []\n actions.append( createOFAction(datapath, ofproto.OFPAT_OUTPUT, self.servers[serverID][0]) ) \n sendPacketOut(msg=msg, actions=actions, buffer_id=msg.buffer_id)\n\n\n\n\t\n\t# if its ipv4_packet, install a flow with certain IDLE_TIME for the client to output to port N, given by the request to the scheduler.\n\t# Send the packet to that port. \n\n#\tfd = os.open(\"/tmp/ryu/Distributed-Internet-Service-Delivery/controller.db\", os.O_RDONLY)\n#\tconn = sqlite3.connect('/dev/fd/%d' % fd)\n#\tos.close(fd)\n# cursor = conn.cursor()\n# addressList = ('10.10.1.1', '10.10.1.2', '10.10.1.3') #filter client packets\n#\tpkt_arp = pkt.get_protocol(arp.arp)\n#\tif pkt_arp:\n# if pkt_arp.dst_ip in addressList: \n#\t print (pkt_arp)\n#\t destination = (pkt_arp.dst_ip,) #get destination ip\n#\t print (destination)\n#\t cursor.execute(\"SELECT * from energyValues where id = (SELECT MAX(id) from energyValues where private_ip = ?)\", destination)\n#\t #\tenergyValue = cursor.fetchone()[1]\n#\t recentInfo = cursor.fetchall()\n# print (recentInfo)\n#\t print (\"Energy value: \" + str(recentInfo[0][1]))\n#\t #\tset.logger.info (\"Last energy value: %s\", str(energyValue))\n\n# self.macLearningHandle(msg)\n\n# out_port = self.get_out_port(msg)\t\n\n #self.forward_packet(msg, [out_port])\n \n @set_ev_cls(ofp_event.EventOFPFlowRemoved, MAIN_DISPATCHER)\n def flow_removal_handler(self, ev):\n msg = ev.msg\n match = msg.match\n\treason = msg.reason\n self.logger.info(\"Client released serverID = %d\", msg.cookie)\n\tself.serverLoad[msg.cookie]-=1\n\n @set_ev_cls(dpset.EventDP, dpset.DPSET_EV_DISPATCHER)\n def _event_switch_enter_handler(self, ev):\n 
dl_type_ipv4 = 0x0800\n dl_type_arp = 0x0806\n dp = ev.dp\n ofproto = dp.ofproto\n parser = dp.ofproto_parser\n self.logger.info(\"Switch connected %s. Installing default flows...\", dp)\n addressList = ('10.10.1.1', '10.10.1.2', '10.10.1.3') # process packets from servers normally\n # hwAddressList = ('02:71:2a:55:7f:98') #filter client packets\n actions = [parser.OFPActionOutput(ofproto.OFPP_NORMAL)]\n for address in addressList:\n match = parser.OFPMatch(dl_type = dl_type_ipv4, nw_src = address)\n self.add_flow(dp, match, actions, 2, 0) \n# self.logger.info(\"Added l2 flow for address %s\", address)\n \n match = parser.OFPMatch(dl_type = dl_type_arp)#process arp packets normally\n self.add_flow(dp, match, actions, 1, 0)\n\n # match = parser.OFPMatch (dl_type = dl_type_ipv4, nw_src=self.ipv4_to_int('10.10.1.14'))\n # actions = [parser.OFPActionSetNwDst(self.ipv4_to_int(self.servers[2][1])), parser.OFPActionSetDlDst(haddr_to_bin(self.servers[2][2])), \n # parser.OFPActionOutput(self.servers[2][0])]\n \n #self.serverLoad[2]+=1 \n # self.add_flow(dp, match, actions, 1, 10, ofproto.OFPFF_SEND_FLOW_REM, 2)\n\t\n match = parser.OFPMatch ()\n actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER)]\n self.add_flow(dp, match, actions, 0, 0) #add miss flow\n self.logger.info(\"Added default rules for servers and miss-flow\")\n\n def ipv4_to_int(self, string):\n \tip = string.split('.')\n \tassert len(ip) == 4\n \ti = 0\n \tfor b in ip:\n \t\tb = int(b)\n \ti = (i << 8) | b\n return i\n\n"
},
{
"alpha_fraction": 0.5146341323852539,
"alphanum_fraction": 0.5609756112098694,
"avg_line_length": 22.117647171020508,
"blob_id": "0401bf0d7e9c0ac2fbbaa27652c1df88b43e4e58",
"content_id": "81fda9abaa26839bf32f77050a4f15c46619fad4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 410,
"license_type": "permissive",
"max_line_length": 44,
"num_lines": 17,
"path": "/RoundRobin.py",
"repo_name": "GraysonScherm/Distributed-Internet-Service-Delivery",
"src_encoding": "UTF-8",
"text": "def RoundRobin(SClist):\r\n if (1 in SClist)==False:\r\n SClist[0]=1\r\n return SClist\r\n elif (SClist.index(1))==(len(SClist)-1):\r\n SClist[SClist.index(1)]=0\r\n SClist[0]=1\r\n return SClist\r\n else:\r\n SClist[SClist.index(1)+1]=1;\r\n SClist[SClist.index(1)]=0;\r\n return SClist\r\n\r\nSClist=[0]*3\r\nfor i in range(1,10):\r\n SClist=RoundRobin(SClist)\r\n print(SClist)\r\n"
}
] | 13 |
fancyshon/Graduation_Topic | https://github.com/fancyshon/Graduation_Topic | d07cd73e2f32c9fc6f8e0542efc55e1cc53f04d4 | 51ec51eddb194e5c7481543366a783e34ee413bf | 62d8265a2b78652f11a6bbaeea1b10e43fc6e2db | refs/heads/master | 2023-08-15T02:54:16.877205 | 2021-09-24T08:31:38 | 2021-09-24T08:31:38 | 409,890,613 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5014469027519226,
"alphanum_fraction": 0.5500994920730591,
"avg_line_length": 33.34161376953125,
"blob_id": "2746c22abfebb6d00af0fc5ee146c7db5e182a12",
"content_id": "1bdadf7819a90340bb82ef76206e83000bf3f682",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11058,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 322,
"path": "/Client/file_receiver.py",
"repo_name": "fancyshon/Graduation_Topic",
"src_encoding": "UTF-8",
"text": "import socket\nimport sys\nimport os\nimport stat\nimport time\nimport hmac\nimport hashlib\nimport tkinter as tk\nimport tkinter.ttk as ttk\n\nMax_pack_size = 256\nLocal_ip = '10.1.1.2'\nLocal_port = 54321\nDest_ip = '10.1.1.4'\nDest_port = 54321\n\n# # Start window\n# window = tk.Tk()\n# window.title('Project')\n# window.geometry('800x500')\n\n# titleLabel = tk.Label(window, text='File tranfer')\n# titleLabel.pack()\n\n# ipFrame = tk.Frame(window)\n# ipFrame.pack(side=tk.TOP)\n# ipLabel = tk.Label(ipFrame, text='Destination IP')\n# ipLabel.pack(side=tk.LEFT)\n# ipEntry = tk.Entry(ipFrame)\n# ipEntry.insert(0, \"10.1.1.4\")\n# ipEntry.pack(side=tk.LEFT)\n\n# ipFrame2 = tk.Frame(window)\n# ipFrame2.pack(side=tk.TOP)\n# ipLabel2 = tk.Label(ipFrame2, text='Local IP')\n# ipLabel2.pack(side=tk.LEFT)\n# ipEntry2 = tk.Entry(ipFrame2)\n# ipEntry2.insert(0, '10.1.1.2')\n# ipEntry2.pack(side=tk.LEFT)\n\n# portFrame = tk.Frame(window)\n# portFrame.pack(side=tk.TOP)\n# portLabel = tk.Label(portFrame, text='Destination Port')\n# portLabel.pack(side=tk.LEFT)\n# portEntry = tk.Entry(portFrame)\n# portEntry.insert(0, \"12345\")\n# portEntry.pack(side=tk.LEFT)\n\n# portFrame2 = tk.Frame(window)\n# portFrame2.pack(side=tk.TOP)\n# portLabel2 = tk.Label(portFrame2, text='Local Port')\n# portLabel2.pack(side=tk.LEFT)\n# portEntry2 = tk.Entry(portFrame2)\n# portEntry2.insert(0, \"54321\")\n# portEntry2.pack(side=tk.LEFT)\n\n\n# def getInfo():\n# global Dest_ip, Dest_port, Local_ip, Local_port\n\n# Dest_ip = ipEntry.get()\n# Dest_port = int(portEntry.get())\n# Local_ip = ipEntry2.get()\n# Local_port = int(portEntry2.get())\n# window.destroy()\n\n\n# confirmBtn = tk.Button(window, text='OK', command=getInfo)\n# confirmBtn.pack()\n\n# window.mainloop()\n\n# Static\nkey = bytearray()\nh_key = bytearray()\nfilesys_file_name = []\nfilesys_file_size = {}\n\ninv_S = [\n b'\\x52', b'\\x09', b'\\x6A', b'\\xD5', b'\\x30', b'\\x36', b'\\xA5', b'\\x38', b'\\xBF', b'\\x40', b'\\xA3', 
b'\\x9E', b'\\x81', b'\\xF3', b'\\xD7', b'\\xFB',\n b'\\x7C', b'\\xE3', b'\\x39', b'\\x82', b'\\x9B', b'\\x2F', b'\\xFF', b'\\x87', b'\\x34', b'\\x8E', b'\\x43', b'\\x44', b'\\xC4', b'\\xDE', b'\\xE9', b'\\xCB',\n b'\\x54', b'\\x7B', b'\\x94', b'\\x32', b'\\xA6', b'\\xC2', b'\\x23', b'\\x3D', b'\\xEE', b'\\x4C', b'\\x95', b'\\x0B', b'\\x42', b'\\xFA', b'\\xC3', b'\\x4E',\n b'\\x08', b'\\x2E', b'\\xA1', b'\\x66', b'\\x28', b'\\xD9', b'\\x24', b'\\xB2', b'\\x76', b'\\x5B', b'\\xA2', b'\\x49', b'\\x6D', b'\\x8B', b'\\xD1', b'\\x25',\n b'\\x72', b'\\xF8', b'\\xF6', b'\\x64', b'\\x86', b'\\x68', b'\\x98', b'\\x16', b'\\xD4', b'\\xA4', b'\\x5C', b'\\xCC', b'\\x5D', b'\\x65', b'\\xB6', b'\\x92',\n b'\\x6C', b'\\x70', b'\\x48', b'\\x50', b'\\xFD', b'\\xED', b'\\xB9', b'\\xDA', b'\\x5E', b'\\x15', b'\\x46', b'\\x57', b'\\xA7', b'\\x8D', b'\\x9D', b'\\x84',\n b'\\x90', b'\\xD8', b'\\xAB', b'\\x00', b'\\x8C', b'\\xBC', b'\\xD3', b'\\x0A', b'\\xF7', b'\\xE4', b'\\x58', b'\\x05', b'\\xB8', b'\\xB3', b'\\x45', b'\\x06',\n b'\\xD0', b'\\x2C', b'\\x1E', b'\\x8F', b'\\xCA', b'\\x3F', b'\\x0F', b'\\x02', b'\\xC1', b'\\xAF', b'\\xBD', b'\\x03', b'\\x01', b'\\x13', b'\\x8A', b'\\x6B',\n b'\\x3A', b'\\x91', b'\\x11', b'\\x41', b'\\x4F', b'\\x67', b'\\xDC', b'\\xEA', b'\\x97', b'\\xF2', b'\\xCF', b'\\xCE', b'\\xF0', b'\\xB4', b'\\xE6', b'\\x73',\n b'\\x96', b'\\xAC', b'\\x74', b'\\x22', b'\\xE7', b'\\xAD', b'\\x35', b'\\x85', b'\\xE2', b'\\xF9', b'\\x37', b'\\xE8', b'\\x1C', b'\\x75', b'\\xDF', b'\\x6E',\n b'\\x47', b'\\xF1', b'\\x1A', b'\\x71', b'\\x1D', b'\\x29', b'\\xC5', b'\\x89', b'\\x6F', b'\\xB7', b'\\x62', b'\\x0E', b'\\xAA', b'\\x18', b'\\xBE', b'\\x1B',\n b'\\xFC', b'\\x56', b'\\x3E', b'\\x4B', b'\\xC6', b'\\xD2', b'\\x79', b'\\x20', b'\\x9A', b'\\xDB', b'\\xC0', b'\\xFE', b'\\x78', b'\\xCD', b'\\x5A', b'\\xF4',\n b'\\x1F', b'\\xDD', b'\\xA8', b'\\x33', b'\\x88', b'\\x07', b'\\xC7', b'\\x31', b'\\xB1', b'\\x12', b'\\x10', b'\\x59', b'\\x27', b'\\x80', b'\\xEC', b'\\x5F',\n 
b'\\x60', b'\\x51', b'\\x7F', b'\\xA9', b'\\x19', b'\\xB5', b'\\x4A', b'\\x0D', b'\\x2D', b'\\xE5', b'\\x7A', b'\\x9F', b'\\x93', b'\\xC9', b'\\x9C', b'\\xEF',\n b'\\xA0', b'\\xE0', b'\\x3B', b'\\x4D', b'\\xAE', b'\\x2A', b'\\xF5', b'\\xB0', b'\\xC8', b'\\xEB', b'\\xBB', b'\\x3C', b'\\x83', b'\\x53', b'\\x99', b'\\x61',\n b'\\x17', b'\\x2B', b'\\x04', b'\\x7E', b'\\xBA', b'\\x77', b'\\xD6', b'\\x26', b'\\xE1', b'\\x69', b'\\x14', b'\\x63', b'\\x55', b'\\x21', b'\\x0C', b'\\x7D'\n]\n\n\ndef byte_mul(byte1, byte2):\n mul = hex((int.from_bytes(byte1, 'big') *\n int.from_bytes(byte2, 'big')) & 0xFF)\n result = mul.encode('utf-8')\n return result\n\n\ndef xorbyte(var, key):\n return bytes(a ^ b for a, b in zip(var, key))\n\n\ndef reverse_subbyte(var):\n var = bytes(inv_S[int.from_bytes(var, \"big\")])\n return var\n\n\ndef reverse_shift_row(byte_array, length):\n # Right Shift\n\n new_array = bytearray(byte_array)\n for i in range(0, length):\n if(i == 0):\n new_array[0:1] = byte_array[length-1:length]\n elif(i == 1):\n new_array[1:2] = byte_array[0:1]\n else:\n new_array[i:i+1] = byte_array[i-1:i]\n\n return new_array\n\n\ndef reverse_mix_column(byte_array, length):\n\n return byte_array\n\n\ndef AES_decrypt(origin_array, key, length):\n '''\n byte_array = bytearray(origin_array)\n # Round 10 ~ 1\n for j in range(0, 10):\n if(j == 0):\n for i in range(0, length):\n byte_array[i:i+ \\\n 1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n byte_array[i:i+1] = reverse_subbyte(byte_array[i:i+1])\n byte_array = reverse_shift_row(byte_array, length)\n else:\n byte_array = reverse_mix_column(byte_array, length)\n for i in range(0, length):\n byte_array[i:i+ \\\n 1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n byte_array[i:i+1] = reverse_subbyte(byte_array[i:i+1])\n byte_array = reverse_shift_row(byte_array, length)\n # Round 0\n for i in range(0, length):\n byte_array[i:i+ \\\n 1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n '''\n return 
origin_array\n\n\ndef read_packet_control_byte(origin_byte):\n control_byte = origin_byte[0:1]\n byte_array = bytearray(origin_byte[1:])\n return control_byte, byte_array\n\n\ndef add_packet_control_byte(number, origin_byte):\n byte_array = bytearray()\n byte_array.append(number)\n byte_array += origin_byte\n return byte_array\n\n\ndef ecc_key(receiver):\n packet = bytearray()\n packet = add_packet_control_byte(3, packet)\n packet += \"123\".encode(\"utf-8\")\n receiver.send(packet)\n print(\"Send Ecc public key \")\n\n\ndef file_transmission(receiver):\n\n # Send request\n selectFile = tk.Tk()\n selectFile.geometry(\"400x200\")\n\n def getFileName():\n byte_array = bytearray()\n Decrypted_array = bytearray()\n hmac_array = bytearray()\n request_packet = bytearray()\n recv_size = 0\n file_size = 0\n file_name = ''\n\n file_name = fileList.get()\n\n request_packet = add_packet_control_byte(2, request_packet)\n request_packet += file_name.encode(\"utf-8\")\n receiver.send(request_packet)\n\n f = open(file_name, \"wb\")\n file_size = filesys_file_size[file_name]\n\n start_time = time.time()\n # Receive file data\n while not recv_size == file_size:\n if file_size - recv_size > 256:\n # 256 + 1\n data = receiver.recv(289)\n recv_size += 256\n control_byte = data[0:1]\n if(control_byte == '9'):\n print(\"Mac Error\")\n break\n byte_array += bytearray(data[1:257])\n Decrypted_array += byte_array\n byte_array.clear()\n print(recv_size)\n else:\n data = receiver.recv(file_size - recv_size + 1 + 32)\n remain_size = file_size - recv_size\n recv_size += file_size - recv_size\n control_byte = data[0:1]\n if(control_byte == '9'):\n print(\"Mac Error\")\n break\n byte_array += bytearray(data[1:remain_size + 1])\n Decrypted_array += byte_array\n byte_array.clear()\n print(recv_size)\n # Write data to file\n f.write(Decrypted_array)\n\n print(\"Completed receiving\")\n print(\"Filename: \", file_name)\n print(\"Filesize: \", os.stat(file_name)[stat.ST_SIZE])\n print(\"Consecution 
time: %s second\" % (time.time() - start_time))\n print()\n f.close()\n\n print(len(Decrypted_array))\n\n selectFile.destroy()\n\n label = tk.Label(selectFile, text='Select a file')\n label.pack()\n fileList = ttk.Combobox(selectFile, value=filesys_file_name)\n fileList.pack()\n okBtn = tk.Button(selectFile, text='OK', command=getFileName)\n okBtn.pack()\n selectFile.mainloop()\n\n # file_name = input(\"Input file name: \")\n\n\ndef find_file_on_server(receiver):\n request_packet = bytearray()\n response_packet = bytearray()\n request_packet = add_packet_control_byte(5, request_packet)\n request_packet += (\"321\").encode(\"utf-8\")\n\n receiver.send(request_packet)\n response_packet = receiver.recv(1024)\n control_byte, response_packet = read_packet_control_byte(response_packet)\n if(control_byte == b'\\x06'):\n number_of_file = response_packet[0:1]\n response_packet = response_packet[1:]\n number_of_file = int.from_bytes(bytes(number_of_file), 'big')\n index = 0\n for i in range(0, number_of_file):\n filesys_file_name.append(\n response_packet.decode(\"utf-8\").split(\"\\n\")[index])\n index += 1\n filesys_file_size[filesys_file_name[i]] = int(\n float(response_packet.decode(\"utf-8\").split(\"\\n\")[index]))\n index += 1\n else:\n print(\"Find file on server error!\\n\")\n\n\nif __name__ == '__main__':\n for i in range(0, 16):\n h_key.append(1)\n\n key_count = 0\n for i in range(0, 16):\n key.append(key_count)\n key_count += 1\n\n receiver = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n receiver.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 57800)\n receiver.bind((Local_ip, Local_port))\n receiver.connect((Dest_ip, Dest_port))\n print(\"It's Client\")\n print(\"Success connect to\", str((Dest_ip, Dest_port)))\n print()\n ecc_key(receiver)\n time.sleep(1/1000000)\n find_file_on_server(receiver)\n\n while(1):\n print(\"Files on server:\\n\")\n print(\"%-20s %-20s\" % (\"File name\", \"File size(byte)\"))\n print()\n for i in range(0, 
len(filesys_file_name)):\n print(\"%-20s\" % filesys_file_name[i], end=\" \")\n print(\"%-20s\" % filesys_file_size[filesys_file_name[i]])\n print()\n print(\"Choose function:\")\n print(\"\\tType 1 for file transmision\")\n print(\"\\tType 0 exit\")\n function = input()\n if(function == '1'):\n file_transmission(receiver)\n elif(function == '0'):\n break\n\n print(\"Client Closed!\")\n receiver.shutdown(2)\n receiver.close()\n"
},
{
"alpha_fraction": 0.4414830505847931,
"alphanum_fraction": 0.5025728940963745,
"avg_line_length": 37.86666488647461,
"blob_id": "1c7fb4237b90c245d45d37ca453228b6bcb1aa75",
"content_id": "9b420b055401cce221f8fb79c2f004b82f892cad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7579,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 195,
"path": "/Client/file_receiver_without_NXP.py",
"repo_name": "fancyshon/Graduation_Topic",
"src_encoding": "UTF-8",
"text": "import socket\nimport sys\nimport os\nimport stat\nimport time\nimport hmac\nimport hashlib\n\nLocal_ip = '127.0.0.1'\nLocal_port = 12345\n\ninv_S = [\n b'\\x52', b'\\x09', b'\\x6A', b'\\xD5', b'\\x30', b'\\x36', b'\\xA5', b'\\x38', b'\\xBF', b'\\x40', b'\\xA3', b'\\x9E', b'\\x81', b'\\xF3', b'\\xD7', b'\\xFB',\n b'\\x7C', b'\\xE3', b'\\x39', b'\\x82', b'\\x9B', b'\\x2F', b'\\xFF', b'\\x87', b'\\x34', b'\\x8E', b'\\x43', b'\\x44', b'\\xC4', b'\\xDE', b'\\xE9', b'\\xCB',\n b'\\x54', b'\\x7B', b'\\x94', b'\\x32', b'\\xA6', b'\\xC2', b'\\x23', b'\\x3D', b'\\xEE', b'\\x4C', b'\\x95', b'\\x0B', b'\\x42', b'\\xFA', b'\\xC3', b'\\x4E',\n b'\\x08', b'\\x2E', b'\\xA1', b'\\x66', b'\\x28', b'\\xD9', b'\\x24', b'\\xB2', b'\\x76', b'\\x5B', b'\\xA2', b'\\x49', b'\\x6D', b'\\x8B', b'\\xD1', b'\\x25',\n b'\\x72', b'\\xF8', b'\\xF6', b'\\x64', b'\\x86', b'\\x68', b'\\x98', b'\\x16', b'\\xD4', b'\\xA4', b'\\x5C', b'\\xCC', b'\\x5D', b'\\x65', b'\\xB6', b'\\x92',\n b'\\x6C', b'\\x70', b'\\x48', b'\\x50', b'\\xFD', b'\\xED', b'\\xB9', b'\\xDA', b'\\x5E', b'\\x15', b'\\x46', b'\\x57', b'\\xA7', b'\\x8D', b'\\x9D', b'\\x84',\n b'\\x90', b'\\xD8', b'\\xAB', b'\\x00', b'\\x8C', b'\\xBC', b'\\xD3', b'\\x0A', b'\\xF7', b'\\xE4', b'\\x58', b'\\x05', b'\\xB8', b'\\xB3', b'\\x45', b'\\x06',\n b'\\xD0', b'\\x2C', b'\\x1E', b'\\x8F', b'\\xCA', b'\\x3F', b'\\x0F', b'\\x02', b'\\xC1', b'\\xAF', b'\\xBD', b'\\x03', b'\\x01', b'\\x13', b'\\x8A', b'\\x6B',\n b'\\x3A', b'\\x91', b'\\x11', b'\\x41', b'\\x4F', b'\\x67', b'\\xDC', b'\\xEA', b'\\x97', b'\\xF2', b'\\xCF', b'\\xCE', b'\\xF0', b'\\xB4', b'\\xE6', b'\\x73',\n b'\\x96', b'\\xAC', b'\\x74', b'\\x22', b'\\xE7', b'\\xAD', b'\\x35', b'\\x85', b'\\xE2', b'\\xF9', b'\\x37', b'\\xE8', b'\\x1C', b'\\x75', b'\\xDF', b'\\x6E',\n b'\\x47', b'\\xF1', b'\\x1A', b'\\x71', b'\\x1D', b'\\x29', b'\\xC5', b'\\x89', b'\\x6F', b'\\xB7', b'\\x62', b'\\x0E', b'\\xAA', b'\\x18', b'\\xBE', b'\\x1B',\n b'\\xFC', b'\\x56', b'\\x3E', b'\\x4B', b'\\xC6', 
b'\\xD2', b'\\x79', b'\\x20', b'\\x9A', b'\\xDB', b'\\xC0', b'\\xFE', b'\\x78', b'\\xCD', b'\\x5A', b'\\xF4',\n b'\\x1F', b'\\xDD', b'\\xA8', b'\\x33', b'\\x88', b'\\x07', b'\\xC7', b'\\x31', b'\\xB1', b'\\x12', b'\\x10', b'\\x59', b'\\x27', b'\\x80', b'\\xEC', b'\\x5F',\n b'\\x60', b'\\x51', b'\\x7F', b'\\xA9', b'\\x19', b'\\xB5', b'\\x4A', b'\\x0D', b'\\x2D', b'\\xE5', b'\\x7A', b'\\x9F', b'\\x93', b'\\xC9', b'\\x9C', b'\\xEF',\n b'\\xA0', b'\\xE0', b'\\x3B', b'\\x4D', b'\\xAE', b'\\x2A', b'\\xF5', b'\\xB0', b'\\xC8', b'\\xEB', b'\\xBB', b'\\x3C', b'\\x83', b'\\x53', b'\\x99', b'\\x61',\n b'\\x17', b'\\x2B', b'\\x04', b'\\x7E', b'\\xBA', b'\\x77', b'\\xD6', b'\\x26', b'\\xE1', b'\\x69', b'\\x14', b'\\x63', b'\\x55', b'\\x21', b'\\x0C', b'\\x7D'\n]\n\n\ndef byte_mul(byte1, byte2):\n mul = hex((int.from_bytes(byte1, 'big') * int.from_bytes(byte2, 'big')) & 0xFF)\n result = mul.encode('utf-8')\n return result\n\n\ndef xorbyte(var, key):\n return bytes(a ^ b for a, b in zip(var, key))\n\n\ndef reverse_subbyte(var):\n var = bytes(inv_S[int.from_bytes(var, \"big\")])\n return var\n\n\ndef reverse_shift_row(byte_array, length):\n # Right Shift\n\n new_array = bytearray(byte_array)\n for i in range(0, length):\n if(i == 0):\n new_array[0:1] = byte_array[length-1:length]\n elif(i == 1):\n new_array[1:2] = byte_array[0:1]\n else:\n new_array[i:i+1] = byte_array[i-1:i]\n\n return new_array\n\n\ndef reverse_mix_column(byte_array, length):\n\n return byte_array\n\n\ndef AES_decrypt(origin_array, key, length):\n\n byte_array = bytearray(origin_array)\n\n # Round 10 ~ 1\n for j in range(0, 10):\n if(j == 0):\n for i in range(0, length):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n byte_array[i:i+1] = reverse_subbyte(byte_array[i:i+1])\n byte_array = reverse_shift_row(byte_array, length)\n else:\n byte_array = reverse_mix_column(byte_array, length)\n for i in range(0, length):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), 
bytes(key[i:i+1]))\n byte_array[i:i+1] = reverse_subbyte(byte_array[i:i+1])\n byte_array = reverse_shift_row(byte_array, length)\n\n # Round 0\n for i in range(0, length):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n\n return byte_array\n\n\ndef read_packet_control_byte(origin_byte):\n control_byte = origin_byte[0:1]\n byte_array = bytearray(origin_byte[1:])\n return control_byte, byte_array\n\n\nif __name__ == '__main__':\n\n key = bytearray()\n h_key = bytearray()\n byte_array = bytearray()\n Decrypted_array = bytearray()\n hmac_array = bytearray()\n recv_size = 0\n file_size = 0\n file_name = ''\n\n for i in range(0, 16):\n h_key.append(1)\n\n key_count = 0\n for i in range(0, 16):\n key.append(key_count)\n key_count += 1\n\n receiver = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n receiver.bind((Local_ip, Local_port))\n receiver.listen(5)\n\n print(\"Wait...\")\n conn, addr = receiver.accept()\n start_time = time.time()\n\n # Receive file information\n\n info_packet = conn.recv(100)\n control_byte, info_packet = read_packet_control_byte(info_packet)\n file_size = int(float(info_packet.decode(\"utf-8\").split(\"\\n\")[0]))\n file_name = info_packet.decode(\"utf-8\").split(\"\\n\")[1]\n\n file_name = \"test1.mp4\"\n f = open(file_name, \"wb\")\n\n # Receive file data\n while not recv_size == file_size:\n if file_size - recv_size > 256:\n # 256 + 32 + 1\n data = conn.recv(289)\n recv_size += 256\n control_byte = data[0:1]\n byte_array += bytearray(data[1:257])\n hmac_array += bytearray(data[257:289])\n print(hmac_array)\n sig = hmac.new(h_key, byte_array, hashlib.sha256).digest()\n if(sig != hmac_array):\n print(\"MAC ERROR\")\n break\n else:\n array_offset = 0\n for i in range(0, 16):\n Decrypted_array += AES_decrypt(\n byte_array[array_offset:array_offset+16], key, 16)\n array_offset += 16\n byte_array.clear()\n hmac_array.clear()\n\n else:\n data = conn.recv(file_size - recv_size + 32 + 1)\n remain_size = file_size - 
recv_size\n recv_size += file_size - recv_size\n control_byte = data[0:1]\n byte_array += bytearray(data[1:remain_size + 1])\n hmac_array += bytearray(data[remain_size + 1:remain_size + 1 + 32])\n sig = hmac.new(h_key, byte_array, hashlib.sha256).digest()\n if(sig != hmac_array):\n print(\"MAC ERROR\")\n break\n else:\n array_offset = 0\n while(1):\n if(remain_size < 16):\n Decrypted_array += AES_decrypt(\n byte_array[array_offset:array_offset+remain_size], key, remain_size)\n break\n else:\n Decrypted_array += AES_decrypt(\n byte_array[array_offset:array_offset+16], key, 16)\n array_offset += 16\n remain_size -= 16\n byte_array.clear()\n hmac_array.clear()\n\n # Write data to file\n f.write(Decrypted_array)\n conn.close()\n\n print(\"Completed receiving\")\n print(\"Filename: \", file_name)\n print(\"Filesize: \", os.stat(file_name)[stat.ST_SIZE])\n print(\"File from \" + str(addr))\n print(\"Consecution time: %s second\" % (time.time() - start_time))\n\n f.close()\n receiver.close()\n"
},
{
"alpha_fraction": 0.44281643629074097,
"alphanum_fraction": 0.5044091939926147,
"avg_line_length": 38,
"blob_id": "4f418efd8aa176f1068f987a5d81f0db52b370ad",
"content_id": "df03ecc74991e7c535b6e7a808185536e86fb078",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7371,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 189,
"path": "/test_client.py",
"repo_name": "fancyshon/Graduation_Topic",
"src_encoding": "UTF-8",
"text": "import socket\nimport os\nimport stat\nimport time\nimport sys\nimport hmac\nimport hashlib\n\n\nMax_pack_size = 256\nDest_ip = '10.1.1.4'\nDest_port = 54321\nLocal_ip = '10.1.1.2'\nLocal_port = 54321\nfile_name = \"test.mp4\"\n\nS = [\n b'\\x63', b'\\x7C', b'\\x77', b'\\x7B', b'\\xF2', b'\\x6B', b'\\x6F', b'\\xC5', b'\\x30', b'\\x01', b'\\x67', b'\\x2B', b'\\xFE', b'\\xD7', b'\\xAB', b'\\x76',\n b'\\xCA', b'\\x82', b'\\xC9', b'\\x7D', b'\\xFA', b'\\x59', b'\\x47', b'\\xF0', b'\\xAD', b'\\xD4', b'\\xA2', b'\\xAF', b'\\x9C', b'\\xA4', b'\\x72', b'\\xC0',\n b'\\xB7', b'\\xFD', b'\\x93', b'\\x26', b'\\x36', b'\\x3F', b'\\xF7', b'\\xCC', b'\\x34', b'\\xA5', b'\\xE5', b'\\xF1', b'\\x71', b'\\xD8', b'\\x31', b'\\x15',\n b'\\x04', b'\\xC7', b'\\x23', b'\\xC3', b'\\x18', b'\\x96', b'\\x05', b'\\x9A', b'\\x07', b'\\x12', b'\\x80', b'\\xE2', b'\\xEB', b'\\x27', b'\\xB2', b'\\x75',\n b'\\x09', b'\\x83', b'\\x2C', b'\\x1A', b'\\x1B', b'\\x6E', b'\\x5A', b'\\xA0', b'\\x52', b'\\x3B', b'\\xD6', b'\\xB3', b'\\x29', b'\\xE3', b'\\x2F', b'\\x84',\n b'\\x53', b'\\xD1', b'\\x00', b'\\xED', b'\\x20', b'\\xFC', b'\\xB1', b'\\x5B', b'\\x6A', b'\\xCB', b'\\xBE', b'\\x39', b'\\x4A', b'\\x4C', b'\\x58', b'\\xCF',\n b'\\xD0', b'\\xEF', b'\\xAA', b'\\xFB', b'\\x43', b'\\x4D', b'\\x33', b'\\x85', b'\\x45', b'\\xF9', b'\\x02', b'\\x7F', b'\\x50', b'\\x3C', b'\\x9F', b'\\xA8',\n b'\\x51', b'\\xA3', b'\\x40', b'\\x8F', b'\\x92', b'\\x9D', b'\\x38', b'\\xF5', b'\\xBC', b'\\xB6', b'\\xDA', b'\\x21', b'\\x10', b'\\xFF', b'\\xF3', b'\\xD2',\n b'\\xCD', b'\\x0C', b'\\x13', b'\\xEC', b'\\x5F', b'\\x97', b'\\x44', b'\\x17', b'\\xC4', b'\\xA7', b'\\x7E', b'\\x3D', b'\\x64', b'\\x5D', b'\\x19', b'\\x73',\n b'\\x60', b'\\x81', b'\\x4F', b'\\xDC', b'\\x22', b'\\x2A', b'\\x90', b'\\x88', b'\\x46', b'\\xEE', b'\\xB8', b'\\x14', b'\\xDE', b'\\x5E', b'\\x0B', b'\\xDB',\n b'\\xE0', b'\\x32', b'\\x3A', b'\\x0A', b'\\x49', b'\\x06', b'\\x24', b'\\x5C', b'\\xC2', b'\\xD3', b'\\xAC', b'\\x62', b'\\x91', 
b'\\x95', b'\\xE4', b'\\x79',\n b'\\xE7', b'\\xC8', b'\\x37', b'\\x6D', b'\\x8D', b'\\xD5', b'\\x4E', b'\\xA9', b'\\x6C', b'\\x56', b'\\xF4', b'\\xEA', b'\\x65', b'\\x7A', b'\\xAE', b'\\x08',\n b'\\xBA', b'\\x78', b'\\x25', b'\\x2E', b'\\x1C', b'\\xA6', b'\\xB4', b'\\xC6', b'\\xE8', b'\\xDD', b'\\x74', b'\\x1F', b'\\x4B', b'\\xBD', b'\\x8B', b'\\x8A',\n b'\\x70', b'\\x3E', b'\\xB5', b'\\x66', b'\\x48', b'\\x03', b'\\xF6', b'\\x0E', b'\\x61', b'\\x35', b'\\x57', b'\\xB9', b'\\x86', b'\\xC1', b'\\x1D', b'\\x9E',\n b'\\xE1', b'\\xF8', b'\\x98', b'\\x11', b'\\x69', b'\\xD9', b'\\x8E', b'\\x94', b'\\x9B', b'\\x1E', b'\\x87', b'\\xE9', b'\\xCE', b'\\x55', b'\\x28', b'\\xDF',\n b'\\x8C', b'\\xA1', b'\\x89', b'\\x0D', b'\\xBF', b'\\xE6', b'\\x42', b'\\x68', b'\\x41', b'\\x99', b'\\x2D', b'\\x0F', b'\\xB0', b'\\x54', b'\\xBB', b'\\x16'\n]\n\n\ndef byte_mul(byte1, byte2):\n mul = hex((int.from_bytes(byte1, 'big') * int.from_bytes(byte2, 'big')) & 0xFF)\n result = mul.encode('utf-8')\n return result\n\n\ndef xorbyte(var, key):\n return bytes(a ^ b for a, b in zip(var, key))\n\n\ndef subbyte(var):\n var = bytes(S[int.from_bytes(var, \"big\")])\n return var\n\n\ndef shift_row(byte_array, length):\n # Left Shift\n\n new_array = bytearray(byte_array)\n for i in range(0, length):\n if(i == length - 1):\n new_array[length-1:length] = byte_array[0:1]\n else:\n new_array[i:i+1] = byte_array[i+1:i+2]\n\n return new_array\n\n\ndef mix_column(byte_array, length):\n\n return byte_array\n\n\ndef AES_encrypt(origin_byte, key, length):\n\n byte_array = bytearray(origin_byte)\n '''\n # Round 0\n for i in range(0, length):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n\n # Round 1 ~ 10\n for j in range(0, 10):\n if(j == 9):\n for i in range(0, length):\n byte_array[i:i+1] = subbyte(byte_array[i:i+1])\n byte_array = shift_row(byte_array, length)\n for i in range(0, length):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n else:\n for i 
in range(0, length):\n byte_array[i:i+1] = subbyte(byte_array[i:i+1])\n byte_array = shift_row(byte_array, length)\n byte_array = mix_column(byte_array, length)\n for i in range(0, length):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n '''\n return byte_array\n\n\ndef add_packet_control_byte(number, origin_byte):\n byte_array = bytearray()\n byte_array.append(number)\n byte_array += origin_byte\n return byte_array\n\n\nif __name__ == '__main__':\n\n byte_array = bytearray()\n encrypted_array = bytearray()\n ch = bytearray()\n Pack_Size = 0\n send_size = 0\n List = []\n\n # Encrypt key\n key = bytearray()\n key_count = 0\n for i in range(0, 16):\n key.append(key_count)\n key_count += 1\n\n # Hmac key\n h_key = bytearray()\n for i in range(0, 16):\n h_key.append(1)\n\n # Read file in binary\n f = open(file_name, \"rb\")\n file_size = os.stat(file_name)[stat.ST_SIZE]\n for i in range(0, file_size):\n ch = f.read(1)\n List.append(ch)\n\n # Connect\n sender = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n #sender.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 289)\n sender.bind((Local_ip, Local_port))\n sender.connect((Dest_ip, Dest_port))\n print(\"Local IP: \", Local_ip)\n print(\"Local Port: \", Local_port)\n print(\"Success connect to\", str((Dest_ip, Dest_port)))\n\n trash = input()\n\n # Send file information: file size, file name\n info_packet = str(file_size) + '\\n' + file_name + '\\n'\n info_packet = add_packet_control_byte(2, info_packet.encode(\"utf-8\"))\n info_packet += hmac.new(h_key, info_packet[1:], hashlib.sha256).digest()\n sender.send(info_packet)\n\n # Sender send file data\n for i in range(0, file_size):\n Pack_Size = Pack_Size + 1\n byte_array += List[i]\n if(Pack_Size == Max_pack_size or i == file_size - 1):\n if(Pack_Size == Max_pack_size):\n array_offset = 0\n encrypted_array.append(0)\n for j in range(0, 16):\n encrypted_array += bytearray(AES_encrypt(\n byte_array[array_offset:array_offset+16], key, 16))\n 
array_offset += 16\n send_size += 256\n elif(i == file_size - 1):\n remain_size = file_size - send_size\n encrypted_array.append(0)\n array_offset = 0\n while(1):\n if(remain_size < 16):\n encrypted_array += bytearray(AES_encrypt(\n byte_array[array_offset:array_offset+remain_size], key, remain_size))\n send_size += remain_size\n break\n else:\n encrypted_array += bytearray(AES_encrypt(\n byte_array[array_offset:array_offset+16], key, 16))\n send_size += 16\n array_offset += 16\n remain_size -= 16\n # Packet size = 256 + 32 + 1bytes\n encrypted_array += hmac.new(h_key, encrypted_array[1:], hashlib.sha256).digest()\n sender.send(encrypted_array)\n print(send_size)\n Pack_Size = 0\n byte_array.clear()\n encrypted_array.clear()\n trash = sender.recv(1000)\n\n print(\"Completed sending\")\n f.close()\n sender.close()\n"
},
{
"alpha_fraction": 0.4802376627922058,
"alphanum_fraction": 0.5307413935661316,
"avg_line_length": 31.3933048248291,
"blob_id": "402111b2d2b17ddece1a6b948b2fda977b204e0a",
"content_id": "e531ba3ad462fad2f89bc6583e555680698aed1c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 15484,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 478,
"path": "/test_server.py",
"repo_name": "fancyshon/Graduation_Topic",
"src_encoding": "UTF-8",
"text": "import socket\nimport os\nimport stat\nimport time\nimport sys\nimport hmac\nimport hashlib\nimport collections\nimport random\nfrom binascii import unhexlify\nimport tkinter as tk\nfrom tkinter import filedialog\n\n# Static\nMax_pack_size = 256\nLocal_ip = '10.1.1.2'\nLocal_port = 12345\nDest_ip = '10.1.1.4'\nDest_port = 12345\nfilesys_file_name = []\nfilesys_file_size = {}\n\n\nS = [\n b'\\x63', b'\\x7C', b'\\x77', b'\\x7B', b'\\xF2', b'\\x6B', b'\\x6F', b'\\xC5', b'\\x30', b'\\x01', b'\\x67', b'\\x2B', b'\\xFE', b'\\xD7', b'\\xAB', b'\\x76',\n b'\\xCA', b'\\x82', b'\\xC9', b'\\x7D', b'\\xFA', b'\\x59', b'\\x47', b'\\xF0', b'\\xAD', b'\\xD4', b'\\xA2', b'\\xAF', b'\\x9C', b'\\xA4', b'\\x72', b'\\xC0',\n b'\\xB7', b'\\xFD', b'\\x93', b'\\x26', b'\\x36', b'\\x3F', b'\\xF7', b'\\xCC', b'\\x34', b'\\xA5', b'\\xE5', b'\\xF1', b'\\x71', b'\\xD8', b'\\x31', b'\\x15',\n b'\\x04', b'\\xC7', b'\\x23', b'\\xC3', b'\\x18', b'\\x96', b'\\x05', b'\\x9A', b'\\x07', b'\\x12', b'\\x80', b'\\xE2', b'\\xEB', b'\\x27', b'\\xB2', b'\\x75',\n b'\\x09', b'\\x83', b'\\x2C', b'\\x1A', b'\\x1B', b'\\x6E', b'\\x5A', b'\\xA0', b'\\x52', b'\\x3B', b'\\xD6', b'\\xB3', b'\\x29', b'\\xE3', b'\\x2F', b'\\x84',\n b'\\x53', b'\\xD1', b'\\x00', b'\\xED', b'\\x20', b'\\xFC', b'\\xB1', b'\\x5B', b'\\x6A', b'\\xCB', b'\\xBE', b'\\x39', b'\\x4A', b'\\x4C', b'\\x58', b'\\xCF',\n b'\\xD0', b'\\xEF', b'\\xAA', b'\\xFB', b'\\x43', b'\\x4D', b'\\x33', b'\\x85', b'\\x45', b'\\xF9', b'\\x02', b'\\x7F', b'\\x50', b'\\x3C', b'\\x9F', b'\\xA8',\n b'\\x51', b'\\xA3', b'\\x40', b'\\x8F', b'\\x92', b'\\x9D', b'\\x38', b'\\xF5', b'\\xBC', b'\\xB6', b'\\xDA', b'\\x21', b'\\x10', b'\\xFF', b'\\xF3', b'\\xD2',\n b'\\xCD', b'\\x0C', b'\\x13', b'\\xEC', b'\\x5F', b'\\x97', b'\\x44', b'\\x17', b'\\xC4', b'\\xA7', b'\\x7E', b'\\x3D', b'\\x64', b'\\x5D', b'\\x19', b'\\x73',\n b'\\x60', b'\\x81', b'\\x4F', b'\\xDC', b'\\x22', b'\\x2A', b'\\x90', b'\\x88', b'\\x46', b'\\xEE', b'\\xB8', b'\\x14', b'\\xDE', b'\\x5E', 
b'\\x0B', b'\\xDB',\n b'\\xE0', b'\\x32', b'\\x3A', b'\\x0A', b'\\x49', b'\\x06', b'\\x24', b'\\x5C', b'\\xC2', b'\\xD3', b'\\xAC', b'\\x62', b'\\x91', b'\\x95', b'\\xE4', b'\\x79',\n b'\\xE7', b'\\xC8', b'\\x37', b'\\x6D', b'\\x8D', b'\\xD5', b'\\x4E', b'\\xA9', b'\\x6C', b'\\x56', b'\\xF4', b'\\xEA', b'\\x65', b'\\x7A', b'\\xAE', b'\\x08',\n b'\\xBA', b'\\x78', b'\\x25', b'\\x2E', b'\\x1C', b'\\xA6', b'\\xB4', b'\\xC6', b'\\xE8', b'\\xDD', b'\\x74', b'\\x1F', b'\\x4B', b'\\xBD', b'\\x8B', b'\\x8A',\n b'\\x70', b'\\x3E', b'\\xB5', b'\\x66', b'\\x48', b'\\x03', b'\\xF6', b'\\x0E', b'\\x61', b'\\x35', b'\\x57', b'\\xB9', b'\\x86', b'\\xC1', b'\\x1D', b'\\x9E',\n b'\\xE1', b'\\xF8', b'\\x98', b'\\x11', b'\\x69', b'\\xD9', b'\\x8E', b'\\x94', b'\\x9B', b'\\x1E', b'\\x87', b'\\xE9', b'\\xCE', b'\\x55', b'\\x28', b'\\xDF',\n b'\\x8C', b'\\xA1', b'\\x89', b'\\x0D', b'\\xBF', b'\\xE6', b'\\x42', b'\\x68', b'\\x41', b'\\x99', b'\\x2D', b'\\x0F', b'\\xB0', b'\\x54', b'\\xBB', b'\\x16'\n]\n\n# Curve\nEllipticCurve = collections.namedtuple('EllipticCurve', 'name p a b g n h')\n\ncurve = EllipticCurve(\n 'secp256k1',\n # Field characteristic.\n p=0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,\n # Curve coefficients.\n a=0,\n b=7,\n # Base point.\n g=(0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798,\n 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8),\n # Subgroup order.\n n=0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141,\n # Subgroup cofactor.\n h=1,\n)\n\n\ndef long_to_bytes(val, endianness='big'):\n \"\"\"\n Use :ref:`string formatting` and :func:`~binascii.unhexlify` to\n convert ``val``, a :func:`long`, to a byte :func:`str`.\n :param long val: The value to pack\n :param str endianness: The endianness of the result. 
``'big'`` for\n big-endian, ``'little'`` for little-endian.\n If you want byte- and word-ordering to differ, you're on your own.\n Using :ref:`string formatting` lets us use Python's C innards.\n \"\"\"\n\n # one (1) hex digit per four (4) bits\n width = val.bit_length()\n\n # unhexlify wants an even multiple of eight (8) bits, but we don't\n # want more digits than we need (hence the ternary-ish 'or')\n width += 8 - ((width % 8) or 8)\n\n # format width specifier: four (4) bits per hex digit\n fmt = '%%0%dx' % (width // 4)\n\n # prepend zero (0) to the width, to zero-pad the output\n s = unhexlify(fmt % val)\n\n if endianness == 'little':\n # see http://stackoverflow.com/a/931095/309233\n s = s[::-1]\n\n return s\n\n\ndef inverse_mod(k, p):\n \"\"\"Returns the inverse of k modulo p.\n This function returns the only integer x such that (x * k) % p == 1.\n k must be non-zero and p must be a prime.\n \"\"\"\n if k == 0:\n raise ZeroDivisionError('division by zero')\n\n if k < 0:\n # k ** -1 = p - (-k) ** -1 (mod p)\n return p - inverse_mod(-k, p)\n\n # Extended Euclidean algorithm.\n s, old_s = 0, 1\n t, old_t = 1, 0\n r, old_r = p, k\n\n while r != 0:\n quotient = old_r // r\n old_r, r = r, old_r - quotient * r\n old_s, s = s, old_s - quotient * s\n old_t, t = t, old_t - quotient * t\n\n gcd, x, y = old_r, old_s, old_t\n\n assert gcd == 1\n assert (k * x) % p == 1\n\n return x % p\n\n\ndef is_on_curve(point):\n \"\"\"Returns True if the given point lies on the elliptic curve.\"\"\"\n if point is None:\n # None represents the point at infinity.\n return True\n\n x, y = point\n\n return (y * y - x * x * x - curve.a * x - curve.b) % curve.p == 0\n\n\ndef point_add(point1, point2):\n \"\"\"Returns the result of point1 + point2 according to the group law.\"\"\"\n assert is_on_curve(point1)\n assert is_on_curve(point2)\n\n if point1 is None:\n # 0 + point2 = point2\n return point2\n if point2 is None:\n # point1 + 0 = point1\n return point1\n\n x1, y1 = point1\n x2, y2 = 
point2\n\n if x1 == x2 and y1 != y2:\n # point1 + (-point1) = 0\n return None\n\n if x1 == x2:\n # This is the case point1 == point2.\n m = (3 * x1 * x1 + curve.a) * inverse_mod(2 * y1, curve.p)\n else:\n # This is the case point1 != point2.\n m = (y1 - y2) * inverse_mod(x1 - x2, curve.p)\n\n x3 = m * m - x1 - x2\n y3 = y1 + m * (x3 - x1)\n result = (x3 % curve.p, -y3 % curve.p)\n\n assert is_on_curve(result)\n\n return result\n\n\ndef scalar_mult(k, point):\n \"\"\"Returns k * point computed using the double and point_add algorithm.\"\"\"\n assert is_on_curve(point)\n\n if k % curve.n == 0 or point is None:\n return None\n\n if k < 0:\n # k * point = -k * (-point)\n return scalar_mult(-k, point_neg(point))\n\n result = None\n addend = point\n\n while k:\n if k & 1:\n # Add.\n result = point_add(result, addend)\n\n # Double.\n addend = point_add(addend, addend)\n\n k >>= 1\n\n assert is_on_curve(result)\n\n return result\n\n\ndef make_keypair():\n \"\"\"Generates a random private-public key pair.\"\"\"\n private_key = random.randrange(1, curve.n)\n public_key = scalar_mult(private_key, curve.g)\n\n return private_key, public_key\n\n\ndef byte_mul(byte1, byte2):\n mul = hex((int.from_bytes(byte1, 'big') *\n int.from_bytes(byte2, 'big')) & 0xFF)\n result = mul.encode('utf-8')\n return result\n\n\ndef xorbyte(var, key):\n return bytes(a ^ b for a, b in zip(var, key))\n\n\ndef subbyte(var):\n var = bytes(S[int.from_bytes(var, \"big\")])\n return var\n\n\ndef shift_row(byte_array, length):\n # Left Shift\n\n new_array = bytearray(byte_array)\n for i in range(0, length):\n if(i == 4):\n new_array[i:i+1] = byte_array[i+1:i+2]\n elif(i == 5):\n new_array[i:i+1] = byte_array[i+1:i+2]\n elif(i == 6):\n new_array[i:i+1] = byte_array[i+1:i+2]\n elif(i == 7):\n new_array[i:i+1] = byte_array[4:5]\n elif(i == 8):\n new_array[i:i+1] = byte_array[i+2:i+3]\n elif(i == 9):\n new_array[i:i+1] = byte_array[i+2:i+3]\n elif(i == 10):\n new_array[i:i+1] = byte_array[8:9]\n elif(i == 
11):\n new_array[i:i+1] = byte_array[9:10]\n elif(i == 12):\n new_array[i:i+1] = byte_array[i+3:i+4]\n elif(i == 13):\n new_array[i:i+1] = byte_array[12:13]\n elif(i == 14):\n new_array[i:i+1] = byte_array[13:14]\n elif(i == 15):\n new_array[i:i+1] = byte_array[14:15]\n\n return new_array\n\n\ndef mix_column(byte_array, length):\n\n return byte_array\n\n\ndef AES_encrypt(origin_byte, key, length):\n\n if(length < 16):\n for i in range(0, 16 - length):\n origin_byte.append(0)\n\n byte_array = bytearray(origin_byte)\n\n # Round 0\n for i in range(0, 16):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n\n # Round 1 ~ 10\n for j in range(0, 10):\n if(j == 9):\n for i in range(0, 16):\n byte_array[i:i+1] = subbyte(byte_array[i:i+1])\n byte_array = shift_row(byte_array, 16)\n for i in range(0, 16):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n else:\n for i in range(0, 16):\n byte_array[i:i+1] = subbyte(byte_array[i:i+1])\n byte_array = shift_row(byte_array, 16)\n byte_array = mix_column(byte_array, 16)\n for i in range(0, 16):\n byte_array[i:i+1] = xorbyte(bytes(byte_array[i:i+1]), bytes(key[i:i+1]))\n\n byte_array = byte_array[0:length]\n return byte_array\n\n\ndef add_packet_control_byte(number, origin_byte):\n byte_array = bytearray()\n byte_array.append(number)\n byte_array += origin_byte\n return byte_array\n\n\ndef read_packet_control_byte(origin_byte):\n control_byte = origin_byte[0:1]\n byte_array = bytearray(origin_byte[1:])\n return control_byte, byte_array\n\n\ndef file_transmission(sender, key, h_key):\n byte_array = bytearray()\n encrypted_array = bytearray()\n ch = bytearray()\n Pack_Size = 0\n send_size = 0\n file_name = \"\"\n List = []\n\n # Wait request\n print(\"Wait for request\")\n while(1):\n request_packet = sender.recv(100)\n control_byte, request_packet = read_packet_control_byte(request_packet)\n if(control_byte == b'\\x02'):\n file_name = request_packet.decode(\"utf-8\")\n break\n\n # 
Read file in binary\n f = open(file_name, \"rb\")\n file_size = os.stat(file_name)[stat.ST_SIZE]\n for i in range(0, file_size):\n ch = f.read(1)\n List.append(ch)\n\n start_time = time.time()\n # Sender send file data\n for i in range(0, file_size):\n Pack_Size = Pack_Size + 1\n byte_array += List[i]\n if(Pack_Size == Max_pack_size or i == file_size - 1):\n if(Pack_Size == Max_pack_size):\n array_offset = 0\n encrypted_array.append(0)\n for j in range(0, 16):\n encrypted_array += bytearray(AES_encrypt(\n byte_array[array_offset:array_offset+16], key, 16))\n array_offset += 16\n send_size += 256\n elif(i == file_size - 1):\n remain_size = file_size - send_size\n encrypted_array.append(0)\n array_offset = 0\n while(1):\n if(remain_size < 16):\n encrypted_array += bytearray(AES_encrypt(\n byte_array[array_offset:array_offset+remain_size], key, remain_size))\n send_size += remain_size\n break\n else:\n encrypted_array += bytearray(AES_encrypt(\n byte_array[array_offset:array_offset+16], key, 16))\n send_size += 16\n array_offset += 16\n remain_size -= 16\n # Packet size = 256 + 32 + 1bytes\n encrypted_array += hmac.new(h_key,\n encrypted_array[1:], hashlib.sha256).digest()\n sender.send(encrypted_array)\n Pack_Size = 0\n byte_array.clear()\n encrypted_array.clear()\n # time.sleep(1/1000000)\n for j in range(0, 5):\n for k in range(0, 5):\n k = k\n\n print(\"Completed sending\")\n print(\"Consecution time: %s second\" % (time.time() - start_time))\n print()\n f.close()\n\n\ndef wait_public_key(sender, key, h_key):\n\n print(\"Wait for Key\")\n key_packet = sender.recv(65)\n control_byte, key_packet = read_packet_control_byte(key_packet)\n #print(\"Origin key: \", key)\n #print(\"Origin h key: \", h_key)\n if(control_byte == b'\\x01'):\n r = random.randrange(1, curve.n)\n R = scalar_mult(r, curve.g)\n R_packet = bytearray()\n R_packet = add_packet_control_byte(4, R_packet)\n R_packet += long_to_bytes(R[0])\n R_packet += long_to_bytes(R[1])\n sender.send(R_packet)\n\n 
public_key = [int.from_bytes(bytes(key_packet[0:32]), \"big\"),\n int.from_bytes(bytes(key_packet[32:64]), \"big\")]\n ecc_key = scalar_mult(r, public_key)\n ecc_key_byte = bytearray()\n ecc_key_byte += long_to_bytes(ecc_key[0])\n ecc_key_byte += long_to_bytes(ecc_key[1])\n ecc_key_byte = hashlib.sha256(ecc_key_byte).digest()\n key.clear()\n h_key.clear()\n key += ecc_key_byte[0:16]\n h_key += ecc_key_byte[16:32]\n\n print(\"Generate ECC key Successed\")\n print()\n #print(\"New key: \", key)\n else:\n print(\"Generate ECC key failed\")\n\n\ndef find_file(sender):\n wait_packet = bytearray()\n send_packet = bytearray()\n number_of_files = 0\n send_packet = add_packet_control_byte(6, send_packet)\n wait_packet = sender.recv(100)\n control_byte, wait_packet = read_packet_control_byte(wait_packet)\n if(control_byte == b'\\x05'):\n files = os.listdir(os.getcwd())\n for i in files:\n fullpath = os.path.join(os.getcwd(), i)\n if os.path.isfile(fullpath):\n number_of_files += 1\n filesys_file_name.append(i)\n filesys_file_size[i] = os.stat(i)[stat.ST_SIZE]\n\n send_packet.append(number_of_files)\n for i in range(0, len(filesys_file_name)):\n send_packet += (filesys_file_name[i] + '\\n' +\n str(filesys_file_size[filesys_file_name[i]]) + '\\n').encode(\"utf-8\")\n\n sender.send(send_packet)\n else:\n print(\"Requset error\")\n\n\nif __name__ == '__main__':\n\n # Static\n key = bytearray()\n h_key = bytearray()\n\n # Encrypt key\n key_count = 0\n for i in range(0, 16):\n key.append(key_count)\n key_count += 1\n # Hmac key\n for i in range(0, 16):\n h_key.append(1)\n\n # Connect\n sender = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sender.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)\n sender.bind((Local_ip, Local_port))\n sender.connect((Dest_ip, Dest_port))\n print(\"It's Server\")\n print(\"Success connect to\", str((Dest_ip, Dest_port)))\n print()\n wait_public_key(sender, key, h_key)\n find_file(sender)\n while(1):\n print(\"Choose function:\")\n 
print(\"\\tType 1 for file transmission\")\n print(\"\\tType 0 exit\")\n function = input()\n if(function == '1'):\n file_transmission(sender, key, h_key)\n elif(function == '0'):\n break\n\n print(\"Server Closed!\")\n sender.shutdown(2)\n sender.close()\n"
},
{
"alpha_fraction": 0.5304286479949951,
"alphanum_fraction": 0.544985830783844,
"avg_line_length": 29.250764846801758,
"blob_id": "7e4d06f4b6a5714755bf29e780ba49d64b3bc74f",
"content_id": "529b35a5023f383d1e4322d7b792ac4f5a66834d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 9892,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 327,
"path": "/NXP_file/tcpecho.c",
"repo_name": "fancyshon/Graduation_Topic",
"src_encoding": "UTF-8",
"text": "/*\n * Copyright (c) 2001-2003 Swedish Institute of Computer Science.\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without modification,\n * are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n * 3. The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED\n * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT\n * SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT\n * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING\n * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY\n * OF SUCH DAMAGE.\n *\n * This file is part of the lwIP TCP/IP stack.\n *\n * Author: Adam Dunkels <[email protected]>\n *\n */\n#include \"tcpecho.h\"\n#include \"decrypt.h\"\n#include \"lwip/igmp.h\"\n#include \"lwip/sockets.h\"\n#include \"lwip/opt.h\"\n#include \"lwip/tcp.h\"\n#include <string.h>\n#include <stdio.h>\n#include \"ECC/uECC.h\"\n\n#if LWIP_NETCONN\n\n#include \"lwip/sys.h\"\n#include 
\"lwip/api.h\"\n\n/*-----------------------------------------------------------------------------------*/\n\n//Client 10.1.1.2 12345\n//Server 10.1.1.2 54321\nstatic struct netconn *server_conn;\nstatic int server_connected = 0;\nstatic struct netconn *client_conn;\nstatic int client_connected = 0;\n\nstatic byte* key_packet;\nstatic uint8_t *ecc_pri_key;\nstatic uint8_t *ecc_pub_key;\nstatic const struct uECC_Curve_t *curve;\n\n\n\nstatic void\ntcpecho_thread_client(void *arg)\n{\n struct netconn *conn, *newconn;\n err_t err;\n LWIP_UNUSED_ARG(arg);\n\n /* Create a new connection identifier. */\n /* Bind connection to well known port number 7. */\n#if LWIP_IPV6\n conn = netconn_new(NETCONN_TCP_IPV6);\n netconn_bind(conn, IP6_ADDR_ANY, 54321);\n#else /* LWIP_IPV6 */\n conn = netconn_new(NETCONN_TCP);\n netconn_bind(conn, IP_ADDR_ANY, 7);\n#endif /* LWIP_IPV6 */\n LWIP_ERROR(\"tcpecho: invalid conn\", (conn != NULL), return;);\n\n /* Tell connection to go into listening mode. */\n netconn_listen(conn);\n init_key(ecc_pri_key,ecc_pub_key);\n\n\n while (1) {\n\n /* Grab new connection. */\n err = netconn_accept(conn, &client_conn);\n client_connected = 1;\n\n /* Process the new connection. 
*/\n if (err == ERR_OK) {\n struct netbuf *buf;\n byte *data;\n u16_t len;\n\n while ((err = netconn_recv(client_conn, &buf)) == ERR_OK) {\n do {\n netbuf_data(buf, &data, &len);\n\n if(server_connected == 1)\n {\n switch(read_control_byte(data, len))\n {\n case 0:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 1:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 2:\n err = netconn_write(server_conn, data, len, NETCONN_COPY);\n break;\n case 3:\n curve = uECC_secp256k1();\n ecc_pub_key = (byte*)malloc(64 * sizeof(byte));\n ecc_pri_key = (byte*)malloc(32 * sizeof(byte));\n uECC_make_key(ecc_pub_key, ecc_pri_key, curve);\n //Send ECC Public Key\n\n key_packet = (byte*)malloc(65*sizeof(byte));\n key_packet[0] = 1;\n int i;\n for(i = 0;i < 64;i++)\n {\n key_packet[i+1] = ecc_pub_key[i];\n }\n\n err = netconn_write(server_conn, key_packet, 65, NETCONN_COPY);\n free(key_packet);\n break;\n case 4:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 5:\n err = netconn_write(server_conn, data, len, NETCONN_COPY);\n break;\n case 6:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 7:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 9:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n }\n\n\n }\n\n\n } while (/*netbuf_next(buf) >= 0*/ 0);\n netbuf_delete(buf);\n }\n netconn_close(client_conn);\n netconn_delete(client_conn);\n client_connected = 0;\n }\n\n }\n}\n\nstatic void\ntcpecho_thread_server(void *arg)\n{\n struct netconn *conn, *newconn;\n err_t err;\n LWIP_UNUSED_ARG(arg);\n\n /* Create a new connection identifier. */\n /* Bind connection to well known port number 7. 
*/\n#if LWIP_IPV6\n conn = netconn_new(NETCONN_TCP_IPV6);\n netconn_bind(conn, IP6_ADDR_ANY, 12345);\n#else /* LWIP_IPV6 */\n conn = netconn_new(NETCONN_TCP);\n netconn_bind(conn, IP_ADDR_ANY, 7);\n#endif /* LWIP_IPV6 */\n LWIP_ERROR(\"tcpecho: invalid conn\", (conn != NULL), return;);\n\n /* Tell connection to go into listening mode. */\n netconn_listen(conn);\n init_key(ecc_pri_key,ecc_pub_key);\n\n\n while (1) {\n\n /* Grab new connection. */\n err = netconn_accept(conn, &server_conn);\n server_connected = 1;\n\n /* Process the new connection. */\n if (err == ERR_OK) {\n struct netbuf *buf;\n byte *data;\n u16_t len;\n\n while ((err = netconn_recv(server_conn, &buf)) == ERR_OK) {\n do {\n netbuf_data(buf, &data, &len);\n byte *sig = (byte*)malloc(32 *sizeof(byte));\n\n if(client_connected == 1)\n {\n switch(read_control_byte(data, len))\n {\n case 0:\n //Check Mac\n\n if(check_hmac(data, len, sig) == 1)\n {\n //Mac Error\n data[0] = '9';\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n }\n else\n {\n split_package((data + 1), len - 1 -32);\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n }\n break;\n case 1:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 2:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 3:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 4:\n generate_key(data+1, 64, ecc_pri_key);\n break;\n case 5:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 6:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 7:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n case 9:\n err = netconn_write(client_conn, data, len, NETCONN_COPY);\n break;\n\n }\n\n\n }\n free(sig);\n\n } while (/*netbuf_next(buf) >= 0*/ 0);\n netbuf_delete(buf);\n }\n netconn_close(server_conn);\n netconn_delete(server_conn);\n server_connected = 0;\n }\n\n }\n}\n\nstatic void\ntcpecho_thread(void 
*arg)\n{\n struct netconn *conn, *newconn;\n err_t err;\n LWIP_UNUSED_ARG(arg);\n\n /* Create a new connection identifier. */\n /* Bind connection to well known port number 7. */\n#if LWIP_IPV6\n conn = netconn_new(NETCONN_TCP_IPV6);\n netconn_bind(conn, IP6_ADDR_ANY, 8888);\n#else /* LWIP_IPV6 */\n conn = netconn_new(NETCONN_TCP);\n netconn_bind(conn, IP_ADDR_ANY, 7);\n#endif /* LWIP_IPV6 */\n LWIP_ERROR(\"tcpecho: invalid conn\", (conn != NULL), return;);\n\n /* Tell connection to go into listening mode. */\n netconn_listen(conn);\n //init_key(ecc_pri_key, ecc_pub_key);\n\n\n while (1) {\n\n /* Grab new connection. */\n err = netconn_accept(conn, &newconn);\n\n\n /* Process the new connection. */\n if (err == ERR_OK) {\n struct netbuf *buf;\n void *data;\n u16_t len;\n\n while ((err = netconn_recv(newconn, &buf)) == ERR_OK) {\n do {\n netbuf_data(buf, &data, &len);\n\n err = netconn_write(newconn, data, len, NETCONN_COPY);\n\n\n } while (netbuf_next(buf) >= 0);\n netbuf_delete(buf);\n }\n netconn_close(newconn);\n netconn_delete(newconn);\n }\n\n }\n}\n/*-----------------------------------------------------------------------------------*/\nvoid\ntcpecho_init(void)\n{\n sys_thread_new(\"tcpecho_thread_server\", tcpecho_thread_server, NULL, DEFAULT_THREAD_STACKSIZE, tskIDLE_PRIORITY + 1 +1);\n sys_thread_new(\"tcpecho_thread_client\", tcpecho_thread_client, NULL, DEFAULT_THREAD_STACKSIZE, tskIDLE_PRIORITY + 1 +2);\n sys_thread_new(\"tcpecho_thread\", tcpecho_thread, NULL, DEFAULT_THREAD_STACKSIZE, tskIDLE_PRIORITY + 1 +1);\n}\n/*-----------------------------------------------------------------------------------*/\n\n#endif /* LWIP_NETCONN */\n"
},
{
"alpha_fraction": 0.6538461446762085,
"alphanum_fraction": 0.6861042380332947,
"avg_line_length": 21.38888931274414,
"blob_id": "b18c5fef1aef43f2561f07c6f407f13a844950f3",
"content_id": "a4bcc8c355fc8f6e5a71326f2ab4df1cef37801a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 811,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 36,
"path": "/NXP_file/decrypt.h",
"repo_name": "fancyshon/Graduation_Topic",
"src_encoding": "WINDOWS-1252",
"text": "/*\n * decrypt.h\n *\n * Created on: 2021¦~3¤ë3¤é\n * Author: user\n */\n\n#include <lwip/arch.h>\n\n\n#ifndef DECRYPT_H_\n#define DECRYPT_H_\n\ntypedef unsigned char byte;\n\nvoid generate_key(byte *R, u16_t len, byte *ecc_pub_key);\nint read_control_byte(byte* data, u16_t length);\nbyte xor_byte(byte* data, u16_t i);\nbyte reverse_subbyte(byte* d);\nvoid reverse_shift_row(byte* data, u16_t length);\nvoid reverse_mix_column(byte* data, u16_t length);\nvoid split_package(byte* data ,u16_t data_size);\nvoid init_key(byte *ecc_pri_key, byte *ecc_pub_key);\nint check_hmac(byte* data, u16_t len, byte* sig);\nvoid ecc_generate_key(void);\nvoid decrypt(byte* data, u16_t length);\nvoid udp_send_message_to_client(byte *data, u16_t len);\nvoid tcp_send_message_to_server(byte *data, u16_t len);\n\n\n\n\n\n\n\n#endif /* DECRYPT_H_ */\n"
},
{
"alpha_fraction": 0.5772727131843567,
"alphanum_fraction": 0.6621212363243103,
"avg_line_length": 16.3157901763916,
"blob_id": "af123b3f171fe0233a732ea7fd4e0621b9242454",
"content_id": "6262d65b0d0bbe2bd610411335efa7cfea93d677",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 664,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 38,
"path": "/README.md",
"repo_name": "fancyshon/Graduation_Topic",
"src_encoding": "UTF-8",
"text": "# Project\n\n## 架構\n\tClient <-> (TCP) NXP (TCP) <-> Server\n### Client <-> (TCP) NXP\n\tClient IP: 10.1.1.2\n\tClient Port: 54321\n\t\n\tNXP IP: 10.1.1.4\n\tNXP Port: 54321\n### NXP (TCP) <-> Server\n\tNXP IP: 10.1.1.4\n\tNXP Port: 12345\n\t\n\tClient IP: 10.1.1.2\n\tClient Port: 12345\n\n\n## Packet Control Byte:\n### First byte in payload\n\t00: data packet\n\t01: ECIES public key packet\n\t02: File Request packet\n\t03: generate key\n\t04: ECIES R packet\n\t05: Request File system packet\n\t06: Response File system packet\n\t09: Mac Error\n\n\n### File information packet format:\n\tfile name \\n\n\tfile size \\n\n\n### File System packet format:\n\tnumber of files(first byte)\n\tfile name \\n\n\tfile size \\n\n\t\n"
},
{
"alpha_fraction": 0.4439522922039032,
"alphanum_fraction": 0.577512800693512,
"avg_line_length": 18.042207717895508,
"blob_id": "e81412a97cc06f3aa7ae6c4396582d8f6cf7192d",
"content_id": "b963e9ccd0517ea947b7c03e867968d81c5bc941",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 5875,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 308,
"path": "/NXP_file/decrypt.c",
"repo_name": "fancyshon/Graduation_Topic",
"src_encoding": "WINDOWS-1252",
"text": "/*\n * decrypt.c\n *\n * Created on: 2021¦~3¤ë3¤é\n * Author: user\n */\n#include <stdio.h>\n#include <stdint.h>\n#include <stdlib.h>\n#include \"decrypt.h\"\n#include \"lwip/api.h\"\n#include \"hmac_sha2.h\"\n#include \"ECC/uECC.h\"\n\n\n\nstatic int count = 0;\n\nstatic byte *key;\nstatic byte *h_key;\nstatic const struct uECC_Curve_t *curve;\nstatic byte* ecc_key;\nstatic byte* ecc_key_sha256;\n\nstatic byte inv_S[256] = { 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,\n\t0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,\n\t0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,\n\t0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,\n\t0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,\n\t0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,\n\t0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,\n\t0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,\n\t0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,\n\t0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,\n\t0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,\n\t0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,\n\t0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,\n\t0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,\n\t0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,\n\t0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 
0x55, 0x21, 0x0c, 0x7d };\n\n\n\nbyte* get_key(void)\n{\n\treturn key;\n}\nbyte* get_h_key(void)\n{\n\treturn h_key;\n}\nbyte* get_ecc_key(void)\n{\n\treturn ecc_key;\n}\nbyte* get_ecc_key_sha256(void)\n{\n\treturn ecc_key_sha256;\n}\n\nvoid generate_key(byte *R, u16_t len, byte *ecc_pri_key)\n{\n\tecc_key = (byte*)malloc(64*sizeof(byte));\n\n\tcurve = uECC_secp256k1();\n\tuECC_shared_secret(R, ecc_pri_key, ecc_key, curve);\n\n\tecc_key_sha256 = (byte*)malloc(32*sizeof(byte));\n\tsha256(ecc_key, 64, ecc_key_sha256);\n\tint i;\n\tfor(i = 0;i < 16;i++)\n\t{\n\t\tkey[i] = ecc_key_sha256[i];\n\t}\n\tfor(i = 0;i < 16;i++)\n\t{\n\t\th_key[i] = ecc_key_sha256[i+16];\n\t}\n}\n\n\nvoid init_key(byte* ecc_pri_key,byte* ecc_pub_key)\n{\n\tif(count == 0)\n\t{\n\t\tcount++;\n\t\tkey = (byte*)malloc(16 * sizeof(byte));\n\t\tint i;\n\t\tint key_count = 0;\n\n\t\tfor(i = 0;i < 16;i++)\n\t\t{\n\t\t\tkey[i] = (byte)key_count;\n\t\t\tkey_count += 1;\n\t\t}\n\t\th_key = (byte*)malloc(16 * sizeof(byte));\n\t\tfor(i = 0;i < 16;i++)\n\t\t{\n\t\t\th_key[i] = (byte)1;\n\t\t}\n\n\n\t}\n}\n\nint read_control_byte(byte* data, u16_t length)\n{\n\tint read = 0;\n\tread = (int)data[0];\n\n\treturn read;\n\n}\n\n//True return 1, false return 0\nint check_hmac(byte* data, u16_t len, byte* sig)\n{\n\n\tbyte *hmac_array = (byte*)malloc(32 * sizeof(byte));\n\tmemcpy(hmac_array, data + len - 32, 32);\n\thmac_sha256(h_key, 16, data + 1, len - 32 - 1, sig, 32);\n\n\tint i;\n\tfor(i = 0;i < 32;i++)\n\t{\n\t\tif(hmac_array[i] != sig[i])\n\t\t{\n\t\t\tfree(hmac_array);\n\t\t\treturn 1;\n\t\t}\n\t}\n\tfree(hmac_array);\n\n\treturn 0;\n\n}\n\n\n\nbyte xor_byte(byte* data, u16_t i)\n{\n\treturn data[0] ^ key[i];\n}\n\nbyte reverse_subbyte(byte* d)\n{\n\tint i = (int)d[0];\n\treturn inv_S[i];\n}\n\nvoid reverse_shift_row(byte* data, u16_t length)\n{\n\t/*\n\t * 0123\n\t * 4567\n * 89AB\n * CDEF\n\t */\n\n\t//Right Shift\n\tbyte* temp_data;\n\ttemp_data = (byte* )malloc(sizeof(byte) * 
length);\n\tmemcpy(temp_data, data, length);\n\tint i;\n\tfor(i = 0;i < length;i++)\n\t{\n\t\tswitch(i)\n\t\t{\n\t\tcase 4:\n\t\t\tdata[i] = temp_data[7];\n\t\t\tbreak;\n\t\tcase 5:\n\t\t\tdata[i] = temp_data[4];\n\t\t\tbreak;\n\t\tcase 6:\n\t\t\tdata[i] = temp_data[5];\n\t\t\tbreak;\n\t\tcase 7:\n\t\t\tdata[i] = temp_data[6];\n\t\t\tbreak;\n\t\tcase 8:\n\t\t\tdata[i] = temp_data[10];\n\t\t\tbreak;\n\t\tcase 9:\n\t\t\tdata[i] = temp_data[11];\n\t\t\tbreak;\n\t\tcase 10:\n\t\t\tdata[i] = temp_data[8];\n\t\t\tbreak;\n\t\tcase 11:\n\t\t\tdata[i] = temp_data[9];\n\t\t\tbreak;\n\t\tcase 12:\n\t\t\tdata[i] = temp_data[13];\n\t\t\tbreak;\n\t\tcase 13:\n\t\t\tdata[i] = temp_data[14];\n\t\t\tbreak;\n\t\tcase 14:\n\t\t\tdata[i] = temp_data[15];\n\t\t\tbreak;\n\t\tcase 15:\n\t\t\tdata[i] = temp_data[12];\n\t\t\tbreak;\n\t\t}\n\t}\n\tfree(temp_data);\n}\n\n\nvoid reverse_mix_column(byte* data, u16_t length)\n{\n\n}\n\nvoid decrypt(byte* data, u16_t length)\n{\n\n\n\tint i,j;\n\t//Round 10 ~ 1\n\tfor(i = 0;i < 10;i++)\n\t{\n\t\tif(i == 0)\n\t\t{\n\t\t\tfor(j = 0;j < length;j++)\n\t\t\t{\n\t\t\t\tdata[j] = xor_byte(data + j, j);\n\t\t\t\tdata[j] = reverse_subbyte(data + j);\n\t\t\t}\n\t\t\treverse_shift_row(data, length);\n\t\t}\n\t\telse\n\t\t{\n\t\t\treverse_mix_column(data, length);\n\t\t\tfor(j = 0;j < length;j++)\n\t\t\t{\n\t\t\t\tdata[j] = xor_byte(data + j, j);\n\t\t\t\tdata[j] = reverse_subbyte(data + j);\n\t\t\t}\n\t\t\treverse_shift_row(data, length);\n\t\t}\n\t}\n\n\t//Round 0\n\tfor(i = 0;i < length;i++)\n\t{\n\t\tdata[i] = xor_byte(data + i, i);\n\t}\n\n}\n\nvoid split_package( byte* data, u16_t data_size)\n{\n\n\tu16_t data_length = data_size / sizeof(byte);\n\n\tint i;\n\n\tif(data_length == 256)\n\t{\n\t\tu16_t offset = 0;\n\t\tfor(i = 0;i < 16;i++)\n\t\t{\n\t\t\tdecrypt(data + offset, 16);\n\t\t\toffset += 16;\n\t\t}\n\t}\n\telse\n\t{\n\t\tu16_t remain_length = data_length;\n\t\tu16_t offset = 0;\n\t\twhile(1)\n\t\t{\n\t\t\tif(remain_length < 16 && remain_length 
!= 0)\n\t\t\t{\n\t\t\t\tbyte* new_data = (byte*)malloc(16* sizeof(byte));\n\t\t\t\tfor(i = 0;i < 16;i++)\n\t\t\t\t{\n\t\t\t\t\tif(i < remain_length)\n\t\t\t\t\t{\n\t\t\t\t\t\tnew_data[i] = data[offset + i];\n\t\t\t\t\t}\n\t\t\t\t\telse\n\t\t\t\t\t{\n\t\t\t\t\t\tnew_data[i] = 0;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tdecrypt(new_data, 16);\n\n\t\t\t\tfor(i = 0;i < remain_length;i++)\n\t\t\t\t{\n\t\t\t\t\tdata[offset+i] = new_data[i];\n\t\t\t\t}\n\n\t\t\t\tfree(new_data);\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tdecrypt(data + offset, 16);\n\t\t\t\toffset += 16;\n\t\t\t\tremain_length -= 16;\n\t\t\t}\n\t\t}\n\t}\n\n}\n\n\n\n\n\n"
}
] | 8 |
zzx2017/keras-multi-head | https://github.com/zzx2017/keras-multi-head | 6aed05f0b9e5171dd9df6445dd0d8e5e18eb49a0 | 49c296291d9b3cdd1dc406efd5a21293e838e260 | 1200e5046e25652e9b6749906120c036ea0104b9 | refs/heads/master | 2020-04-05T17:55:12.302074 | 2018-10-27T07:16:40 | 2018-11-09T05:16:55 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7307692170143127,
"alphanum_fraction": 0.807692289352417,
"avg_line_length": 12,
"blob_id": "9f8667e8e871e0efcef736d9d24d0315e88ecbd5",
"content_id": "b29715e0bd0017366c2746aedd3f4d00430e3bb6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 52,
"license_type": "permissive",
"max_line_length": 28,
"num_lines": 4,
"path": "/requirements.txt",
"repo_name": "zzx2017/keras-multi-head",
"src_encoding": "UTF-8",
"text": "numpy\ntensorflow\nKeras\nkeras-self-attention==0.30.0\n"
},
{
"alpha_fraction": 0.4976821839809418,
"alphanum_fraction": 0.5184498429298401,
"avg_line_length": 32.91823959350586,
"blob_id": "21d45803b3a72a1d23e3d05eea2c017ef363a982",
"content_id": "fd4d1ec4dfc5cbc6b09e0703cbea72faaccae42d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5393,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 159,
"path": "/tests/test_multi_head_attention.py",
"repo_name": "zzx2017/keras-multi-head",
"src_encoding": "UTF-8",
"text": "import os\nimport tempfile\nimport random\nimport unittest\nimport keras\nimport numpy as np\nfrom keras_multi_head import MultiHeadAttention\n\n\nclass TestMultiHead(unittest.TestCase):\n\n def test_sample(self):\n input_layer = keras.layers.Input(\n shape=(512,),\n name='Input',\n )\n embed_layer = keras.layers.Embedding(\n input_dim=12,\n output_dim=768,\n mask_zero=True,\n name='Embedding',\n )(input_layer)\n output_layer = MultiHeadAttention(\n head_num=12,\n name='Multi-Head',\n )(embed_layer)\n model = keras.models.Model(inputs=input_layer, outputs=output_layer)\n model.compile(\n optimizer='adam',\n loss='mse',\n metrics={},\n )\n model.summary()\n self.assertEqual((None, 512, 768), model.layers[-1].output_shape)\n\n def test_invalid_head_num(self):\n with self.assertRaises(IndexError):\n input_layer = keras.layers.Input(\n shape=(2, 3),\n name='Input',\n )\n MultiHeadAttention(\n head_num=2,\n name='Multi-Head',\n )(input_layer)\n\n def test_fit_self(self):\n input_layer = keras.layers.Input(\n shape=(2, 3),\n name='Input',\n )\n att_layer = MultiHeadAttention(\n head_num=3,\n name='Multi-Head-1',\n )(input_layer)\n dense_layer = keras.layers.Dense(units=3, name='Dense-1')(att_layer)\n att_layer = MultiHeadAttention(\n head_num=3,\n name='Multi-Head-2',\n )(dense_layer)\n output_layer = keras.layers.Dense(units=3, name='Dense-2')(att_layer)\n model = keras.models.Model(inputs=input_layer, outputs=output_layer)\n model.compile(\n optimizer='adam',\n loss='mse',\n metrics={},\n )\n model.summary()\n\n def _generator(batch_size=32):\n while True:\n inputs = np.random.random((batch_size, 2, 3))\n outputs = np.asarray([[[0.0, -0.1, 0.2]] * 2] * batch_size)\n yield inputs, outputs\n\n model.fit_generator(\n generator=_generator(),\n steps_per_epoch=1000,\n epochs=10,\n validation_data=_generator(),\n validation_steps=100,\n callbacks=[\n keras.callbacks.EarlyStopping(monitor='val_loss', patience=5)\n ],\n )\n model_path = 
os.path.join(tempfile.gettempdir(), 'test_save_load_%f.h5' % random.random())\n model.save(model_path)\n model = keras.models.load_model(model_path, custom_objects={\n 'MultiHeadAttention': MultiHeadAttention,\n })\n for inputs, _ in _generator(batch_size=3):\n predicts = model.predict(inputs)\n expect = np.asarray([[[0.0, -0.1, 0.2]] * 2] * 3)\n actual = np.round(predicts, decimals=1)\n self.assertTrue(np.allclose(expect, actual), (expect, actual))\n break\n\n def test_fit_multi(self):\n input_query = keras.layers.Input(\n shape=(2, 3),\n name='Input-Q',\n )\n input_key = keras.layers.Input(\n shape=(4, 5),\n name='Input-K',\n )\n input_value = keras.layers.Input(\n shape=(4, 6),\n name='Input-V',\n )\n att_layer = MultiHeadAttention(\n head_num=3,\n name='Multi-Head-1',\n )([input_query, input_key, input_value])\n dense_layer = keras.layers.Dense(units=3, name='Dense-1')(att_layer)\n att_layer = MultiHeadAttention(\n head_num=3,\n name='Multi-Head-2',\n )(dense_layer)\n output_layer = keras.layers.Dense(units=3, name='Dense-2')(att_layer)\n model = keras.models.Model(inputs=[input_query, input_key, input_value], outputs=output_layer)\n model.compile(\n optimizer='adam',\n loss='mse',\n metrics={},\n )\n model.summary()\n\n def _generator(batch_size=32):\n while True:\n inputs = [\n np.random.random((batch_size, 2, 3)),\n np.random.random((batch_size, 4, 5)),\n np.random.random((batch_size, 4, 6)),\n ]\n outputs = np.asarray([[[0.0, -0.1, 0.2]] * 2] * batch_size)\n yield inputs, outputs\n\n model.fit_generator(\n generator=_generator(),\n steps_per_epoch=1000,\n epochs=10,\n validation_data=_generator(),\n validation_steps=100,\n callbacks=[\n keras.callbacks.EarlyStopping(monitor='val_loss', patience=5)\n ],\n )\n model_path = os.path.join(tempfile.gettempdir(), 'test_save_load_%f.h5' % random.random())\n model.save(model_path)\n model = keras.models.load_model(model_path, custom_objects={\n 'MultiHeadAttention': MultiHeadAttention,\n })\n for inputs, _ in 
_generator(batch_size=3):\n predicts = model.predict(inputs)\n expect = np.asarray([[[0.0, -0.1, 0.2]] * 2] * 3)\n actual = np.round(predicts, decimals=1)\n self.assertTrue(np.allclose(expect, actual), (expect, actual))\n break\n"
},
{
"alpha_fraction": 0.5510112643241882,
"alphanum_fraction": 0.555505633354187,
"avg_line_length": 35.776859283447266,
"blob_id": "b0e75bd6d145510c89212f635a130de1a7d1f519",
"content_id": "d164885bbbd04475b61f783da940a430c94fa7d7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4450,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 121,
"path": "/keras_multi_head/multi_head_attention.py",
"repo_name": "zzx2017/keras-multi-head",
"src_encoding": "UTF-8",
"text": "import keras\nimport keras.backend as K\nfrom keras_self_attention import ScaledDotProductAttention\n\n\nclass MultiHeadAttention(keras.layers.Layer):\n \"\"\"Multi-head attention layer.\n\n See: https://arxiv.org/pdf/1706.03762.pdf\n \"\"\"\n\n def __init__(self,\n head_num,\n activation='relu',\n kernel_initializer='glorot_normal',\n kernel_regularizer=None,\n kernel_constraint=None,\n history_only=False,\n **kwargs):\n \"\"\"Initialize the layer.\n\n :param head_num: Number of heads.\n :param activation: Activations for linear mappings.\n :param kernel_initializer: Initializer for linear mappings.\n :param kernel_regularizer: Regularizer for linear mappings.\n :param kernel_constraint: Constraints for linear mappings.\n :param history_only: Whether to only use history in attention layer.\n \"\"\"\n self.supports_masking = True\n self.head_num = head_num\n self.activation = keras.activations.get(activation)\n self.kernel_initializer = keras.initializers.get(kernel_initializer)\n self.kernel_regularizer = keras.regularizers.get(kernel_regularizer)\n self.kernel_constraint = keras.constraints.get(kernel_constraint)\n self.history_only = history_only\n\n self.Wq, self.Wk, self.Wv, self.Wo = None, None, None, None\n super(MultiHeadAttention, self).__init__(**kwargs)\n\n def get_config(self):\n config = {\n 'head_num': self.head_num,\n 'activation': self.activation,\n 'kernel_initializer': self.kernel_initializer,\n 'kernel_regularizer': self.kernel_regularizer,\n 'kernel_constraint': self.kernel_constraint,\n 'history_only': self.history_only,\n }\n base_config = super(MultiHeadAttention, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\n def compute_output_shape(self, input_shape):\n if isinstance(input_shape, list):\n q, k, v = input_shape\n return q[:-1] + (v[-1],)\n return input_shape\n\n def compute_mask(self, inputs, input_mask=None):\n if isinstance(input_mask, list):\n return input_mask[0]\n return input_mask\n\n 
def build(self, input_shape):\n if isinstance(input_shape, list):\n q, k, v = input_shape\n else:\n q = k = v = input_shape\n feature_dim = v[-1]\n if feature_dim % self.head_num != 0:\n raise IndexError('Invalid head number %d with the given input dim %d' % (self.head_num, feature_dim))\n self.Wq = self.add_weight(\n shape=(q[-1], feature_dim),\n initializer=self.kernel_initializer,\n name='%s_Wq' % self.name,\n )\n self.Wk = self.add_weight(\n shape=(k[-1], feature_dim),\n initializer=self.kernel_initializer,\n name='%s_Wk' % self.name,\n )\n self.Wv = self.add_weight(\n shape=(v[-1], feature_dim),\n initializer=self.kernel_initializer,\n name='%s_Wv' % self.name,\n )\n self.Wo = self.add_weight(\n shape=(feature_dim, feature_dim),\n initializer=self.kernel_initializer,\n name='%s_Wo' % self.name,\n )\n super(MultiHeadAttention, self).build(input_shape)\n\n def call(self, inputs, mask=None):\n if isinstance(inputs, list):\n q, k, v = inputs\n else:\n q = k = v = inputs\n feature_dim = K.shape(v)[-1]\n head_dim = feature_dim // self.head_num\n q = K.dot(q, self.Wq)\n k = K.dot(k, self.Wk)\n v = K.dot(v, self.Wv)\n if self.activation is not None:\n q = self.activation(q)\n k = self.activation(k)\n v = self.activation(v)\n outputs = []\n for i in range(self.head_num):\n begin, end = i * head_dim, (i + 1) * head_dim\n outputs.append(ScaledDotProductAttention(\n history_only=self.history_only,\n name='%s-Att-%d' % (self.name, i + 1),\n )([\n q[:, :, begin:end],\n k[:, :, begin:end],\n v[:, :, begin:end],\n ]))\n y = K.dot(K.concatenate(outputs), self.Wo)\n if self.activation is not None:\n y = self.activation(y)\n return y\n"
}
] | 3 |
ImSunyoung/Math_Box | https://github.com/ImSunyoung/Math_Box | 54af09612e45aefcf1554faccfdf23295488fbf6 | efff64a265bff44e3cefe97bb71b3df33b81c387 | 58bce5a5cb57bae281f2de0c5c53c2b53845db6b | refs/heads/main | 2023-04-03T05:01:40.874332 | 2021-04-05T12:35:26 | 2021-04-05T12:35:26 | 354,775,809 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6231883764266968,
"alphanum_fraction": 0.6231883764266968,
"avg_line_length": 10.666666984558105,
"blob_id": "5ff34e4413a7ae6d35afa353aa922db585cd1778",
"content_id": "d292fe737b22313e84220dfab9aaca4f21239797",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 79,
"license_type": "no_license",
"max_line_length": 20,
"num_lines": 6,
"path": "/calculator.py",
"repo_name": "ImSunyoung/Math_Box",
"src_encoding": "UTF-8",
"text": "# 기본 계산기\ndef add(a, b):\n\treturn a+b\n\ndef substract(a, b):\n\treturn a-b"
}
] | 1 |
MarcelloSerqueira/Hyperparameter-Search-Deep-NN | https://github.com/MarcelloSerqueira/Hyperparameter-Search-Deep-NN | 74a3e84d5a2e0e5d31e9bf8e4793ff7f4c0cbdfb | 9b67ec97ff5554ae2888d84a1bdf5ca7c2126cef | 9c6ba4c0af44bad22fde21e74cd40ad3c0fbb681 | refs/heads/master | 2021-01-22T07:58:36.166191 | 2019-10-30T19:26:41 | 2019-10-30T19:26:41 | 102,321,996 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.8163265585899353,
"alphanum_fraction": 0.8163265585899353,
"avg_line_length": 48,
"blob_id": "3c5671aae9ac5ce125132400f8c002a8359729f8",
"content_id": "4ee5433a6ed0b823c71a3ffacd4e792eb417b851",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 49,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 1,
"path": "/README.md",
"repo_name": "MarcelloSerqueira/Hyperparameter-Search-Deep-NN",
"src_encoding": "UTF-8",
"text": "# Hyperparameter Evaluation in a Deep Neural Net\n"
},
{
"alpha_fraction": 0.5984537601470947,
"alphanum_fraction": 0.626238226890564,
"avg_line_length": 28.571428298950195,
"blob_id": "76386b74168bd647572973b25b6de8865975ab38",
"content_id": "443ec24616c785637083050f9a7156314fbd994c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4139,
"license_type": "no_license",
"max_line_length": 193,
"num_lines": 140,
"path": "/deep_neural_net.py",
"repo_name": "MarcelloSerqueira/Hyperparameter-Search-Deep-NN",
"src_encoding": "UTF-8",
"text": "import os\nos.environ['TF_CPP_MIN_LOG_LEVEL']='2'\nimport tensorflow as tf\nimport numpy as np\nimport data_utils as du\nimport sys\nimport datetime\nimport time\nfrom sklearn.metrics import precision_score, f1_score, recall_score, accuracy_score\n\ndef initialize_parameters():\n\tunits_layer1 = int(sys.argv[1])\n\tunits_layer2 = int(sys.argv[2])\n\tunits_layer3 = int(sys.argv[3])\n\n\tW1 = tf.Variable(tf.random_normal([num_x, units_layer1]))\n\tb1 = tf.Variable(tf.random_normal([units_layer1]))\n\n\tW2 = tf.Variable(tf.random_normal([units_layer1, units_layer2]))\n\tb2 = tf.Variable(tf.random_normal([units_layer2]))\n\n\tW3 = tf.Variable(tf.random_normal([units_layer2, units_layer3]))\n\tb3 = tf.Variable(tf.random_normal([units_layer3]))\n\n\tW_out = tf.Variable(tf.random_normal([units_layer3, n_classes]))\n\tb_out = tf.Variable(tf.random_normal([n_classes]))\n\n\tparameters = {\"W1\": W1,\n\t\t\t\t \"b1\": b1,\n\t\t\t\t \"W2\": W2,\n\t\t\t\t \"b2\": b2,\n\t\t\t\t \"W3\": W3,\n\t\t\t\t \"b3\": b3,\n\t\t\t\t \"W_out\": W_out,\n\t\t\t\t \"b_out\": b_out}\n\n\treturn parameters\n\ndef foward_propagation(data, parameters):\n\tW1 = parameters[\"W1\"]\n\tb1 = parameters[\"b1\"]\n\tW2 = parameters[\"W2\"]\n\tb2 = parameters[\"b2\"]\n\tW3 = parameters[\"W3\"]\n\tb3 = parameters[\"b3\"]\n\tW_out = parameters[\"W_out\"]\n\tb_out = parameters[\"b_out\"]\n\n\t#Forward Prop\n\tZ1 = tf.add(tf.matmul(data, W1), b1)\n\tA1 = tf.nn.relu(Z1)\n\n\tZ2 = tf.add(tf.matmul(A1, W2), b2)\n\tA2 = tf.nn.relu(Z2)\n\n\tZ3 = tf.add(tf.matmul(A2, W3), b3)\n\tA3 = tf.nn.relu(Z3)\n\n\tZ_out = tf.matmul(A3, W_out) + b_out\n\tA_out = tf.nn.relu(Z_out)\n\n\tcache = {\"Z1\": Z1,\n \"A1\": A1,\n \"Z2\": Z2,\n \"A2\": A2,\n \"Z3\": Z3,\n \"A3\": A3,\n \"Z_out\": Z_out,\n \"A_out\": A_out}\n\n\treturn Z_out\n\ndef nn_train(prediction, y, x, lr, W_out):\n\tlearning_rate = float(sys.argv[4])\n\tbeta = float(sys.argv[5])\n\n\tloss = 
tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y)) #Ou reduce_sum\n\tregularizer = tf.nn.l2_loss(W_out) #L2\n\tloss = tf.reduce_mean(loss + beta * regularizer)\n\n\toptimizer = tf.train.AdamOptimizer(lr).minimize(loss)\n\n\tepochs_no = 7\n\tbatch_size = 50\n\t\n\twith tf.Session() as sess:\n\t\tsess.run(tf.global_variables_initializer())\n\n\t\tfor epoch in range(epochs_no):\n\t\t\tepoch_loss = 0\n\t\t\ti=0\n\t\t\twhile i < len(trainX):\n\t\t\t\tstart = i\n\t\t\t\tend = i+batch_size\n\t\t\t\tbatch_x = np.array(trainX[start:end])\n\t\t\t\tbatch_y = np.array(trainY[start:end])\n\t\t\t\t_, c = sess.run([optimizer, loss], feed_dict={x: batch_x, y: batch_y, lr: learning_rate})\n\t\t\t\tepoch_loss += c\n\t\t\t\ti+=batch_size\n\t\t\tprint('Epoch', epoch+1, 'of', epochs_no, '| loss:', epoch_loss)\n\t\t\t#if epoch == 10:\n\t\t\t\t#learning_rate = 0.005 LR after 10 epochs...\n\n\t\tnn_performance_metrics(prediction, y, sess)\n\ndef nn_performance_metrics(prediction, y, sess):\n\t\tpred_model = tf.argmax(prediction, 1)\n\t\tpred_model = sess.run(pred_model, feed_dict={x:predX, y:predY})\n\t\tpred_true = tf.argmax(y, 1)\n\t\tpred_true = sess.run(pred_true, feed_dict={x:predX, y:predY})\n\n\t\tprecision = precision_score(pred_true, pred_model, average='macro')\n\t\trecall = recall_score(pred_true, pred_model, average='macro')\n\t\tf1 = f1_score(pred_true, pred_model, average='macro')\n\t\tacc = accuracy_score(pred_true, pred_model)\n\n\t\tprint('\\n')\n\t\tprint('================================')\n\t\tprint('Precision: ', precision, '\\n','Recall: ', recall, '\\n' 'F1-score: ', f1, '\\n' 'Accuracy: ', acc, '\\n', 'Time: ', time.time()-now)\n\t\tprint('================================')\n\t\tprint('\\n')\n\t\tend = datetime.datetime.now()\n\t\tprint('***', acc, start, end, os.environ['COMPUTERNAME'], os.path.basename(sys.argv[0]), int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3]), float(sys.argv[4]), float(sys.argv[5]), sep=\"|\")\n\n\nstart 
= datetime.datetime.now()\n\ntrainX, trainY, predX, predY, n_classes = du.csv_to_numpy_array(\"datasets\\mnist_train.csv\", \"datasets\\mnist_val.csv\")\n\nnum_x = trainX.shape[1]\nnum_y = trainY.shape[1]\n\nx = tf.placeholder(tf.float32, [None, num_x])\ny = tf.placeholder(tf.float32, [None, num_y])\nlr = tf.placeholder(tf.float32)\n\nnow = time.time()\nparameters = initialize_parameters()\nz_out = foward_propagation(x, parameters)\nnn_train(z_out, y, x, lr, parameters[\"W_out\"])"
}
] | 2 |
huongp112/Hotel-Review-Analysis | https://github.com/huongp112/Hotel-Review-Analysis | cc8845cba1b08dd9d1db0ea0dacc69afcf299b94 | 13ad549ef5bdf4a77a06ed0fdf6180115cc6ccf5 | 8ecd2e1517280b23ffaa9b2d916b4967dde0a305 | refs/heads/master | 2020-05-13T05:41:45.674479 | 2019-04-16T04:15:46 | 2019-04-16T04:15:46 | 181,612,047 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.712261974811554,
"alphanum_fraction": 0.7270088195800781,
"avg_line_length": 32.31075668334961,
"blob_id": "c82355ada51a8c75a5d1488c8e61f83edd55f4c5",
"content_id": "c70924478373dba60ae1b7989825c9bd04c99773",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8612,
"license_type": "no_license",
"max_line_length": 150,
"num_lines": 251,
"path": "/Hotel_Analysis_Part 3.py",
"repo_name": "huongp112/Hotel-Review-Analysis",
"src_encoding": "UTF-8",
"text": "\r\n# Batch imports of text processing libraries\r\nimport numpy as np\r\nimport scipy as sp\r\nimport nltk\r\nimport string\r\nglobal string\r\nimport scipy.sparse as sp\r\n\r\nfrom sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer\r\nfrom sklearn.naive_bayes import MultinomialNB\r\nfrom sklearn.linear_model import LogisticRegression\r\nfrom sklearn import svm\r\n\r\nimport pandas as pd #Import pandas library\r\n#import the clean csv file \r\npath = r'C:\\Users\\Huong Pham\\Documents\\Graduate School\\Winter 2019\\4 classes\\\\'\r\ndata = 'clean_data.csv'\r\ndf2= pd.read_csv(path+data)\r\nprint (df2.shape) #Dataframe df1 contains only reviewer score 3 & 10\r\nprint df2.head()\r\n\r\nfrom textblob import TextBlob\r\nfrom textblob import Word\r\nfrom nltk.stem.snowball import SnowballStemmer\r\n\r\n# Write a function to create sentimental scores for the Reviews\r\ndef sentiment_func(review):\r\n try:\r\n return TextBlob(review).sentiment.polarity\r\n except:\r\n return None\r\n\r\n# Apply the \"sentiment_func\" function. Add a new column 'Sentiment' to the data frame for the polarity scores \r\ndf2['Sentiment'] = df2['Review'].apply(sentiment_func)\r\nprint df2['Sentiment']\r\n\r\n# box plot of sentiment grouped by stars\r\ndf2.boxplot(column='Sentiment', by='Reviewer_Score')\r\n\r\n# reviews with most positive sentiment\r\ndf2[df2.Sentiment == 1].Review.head()\r\n\r\n# reviews with most negative sentiment\r\ndf2[df2.Sentiment == -1].Review.head()\r\n\r\n# reviews with most negative sentiment\r\ndf2[df2.Sentiment == 0].Review.head()\r\n\r\n# remove index column\r\n#print df2.shape\r\ndf2 = df2.reset_index()\r\ndf2=df2.drop(['index'],axis=1)\r\nprint df2.shape\r\n\r\n# repalace inf by NaN values\r\ndf2.replace([np.inf, -np.inf], np.nan) # This code doesn't seem to work. 
I still get error when I run my models\r\n\r\n# Let's replace all Nan values with 1234, and export to a new CSV file to examine each 1234 value now\r\ndf_cleaned = df2.fillna(1234)\r\nprint df_cleaned.shape \r\n\r\n# Export to CSV file\r\nimport os\r\npath_d = r'C:\\Users\\Huong Pham\\Documents\\Graduate School\\Winter 2019\\4 classes\\\\'\r\ndf_cleaned.to_csv(os.path.join(path_d,'testing.csv')) # Cleaned up the NaN values in Excel, then renamed 'testing' to 'clean_data_1'\r\n\r\n# Re-import the clean csv file \r\nimport pandas as pd #Import pandas library\r\npath = r'C:\\Users\\Huong Pham\\Documents\\Graduate School\\Winter 2019\\4 classes\\\\'\r\ndata = 'clean_data_1.csv'\r\ndf3= pd.read_csv(path+data)\r\nprint (df3.shape) \r\nlist(df3.columns.values)\r\n\r\n# CREATE LABEL VECTOR AND PREDICTORS\r\n# Create the outcome/lable vector (y)\r\ny = df3['Reviewer_Score']\r\nprint y.head()\r\n\r\n# Creating X which is the selected attributes as dummy data because jupyter get read nominal attributes\r\nfeature_cols = ['Additional_Number_of_Scoring','Sentiment', 'Reviewer_Nationality', 'Country', 'Average_Score', 'Total_Number_of_Reviews']\r\nfeat = df3[feature_cols]\r\nX = feat\r\ndf3 = pd.get_dummies(feat)\r\nX = df3\r\n\r\ndf3 = df3.reset_index(drop=True)\r\nprint df3.shape\r\nlist(df3.columns.values)\r\n\r\n#Define test_train data\r\nfrom sklearn.model_selection import train_test_split\r\nX_train, X_test, y_train, y_test = train_test_split(X, y)\r\n\r\n# Before splitting\r\nprint X.shape\r\n\r\n# After splitting\r\nprint X_train.shape\r\nprint X_test.shape\r\n\r\n# KNN MODEL\r\n'''\r\nStep 1: decide what an appropriate \"N\" is for our model. 
This was determined to be \"6\" based on running through options of \"N\" and comparing accuracy.\r\n'''\r\n#import the class\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nfrom sklearn.model_selection import KFold, cross_val_score\r\nimport matplotlib.pyplot as plt\r\n#Now, let's iterate through potential values of K to find an optimal value for our KNN model\r\nk_range = range(1,10)\r\n\r\nmodels = []\r\n\r\nfor k in k_range: \r\n knn = KNeighborsClassifier(n_neighbors=k)\r\n k_scores = cross_val_score(knn, X, y, cv=5, scoring='accuracy')\r\n models.append(k_scores.mean())\r\nprint(models)\r\n\r\nplt.plot(k_range, models)\r\nplt.xlabel('KNN Value')\r\nplt.ylabel('Accuracy Score')\r\n\r\n'''\r\nStep 2: Run KNN with Test Train Split\r\nNow that we know we want to us 6 as our KNN value, we can run the KNN Model using Test, Train, Split on our data and print the confusion matrix.\r\n'''\r\n#import the class\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nfrom sklearn.model_selection import train_test_split #import the model\r\n'''\r\nStep 2a: create test, train datasets\r\n'''\r\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=99)\r\n\r\nknn = KNeighborsClassifier(n_neighbors=6)\r\nknn.fit(X_train, y_train)\r\n\r\n'''\r\nStep 2b: test the model on the testing dataset and evaluate how accurate the model is,\r\n#based on the model trained on the training dataset \r\n'''\r\nfrom sklearn import metrics\r\ny_pred_class = knn.predict(X_test)\r\nprint (\"KNN Score Accurancy Score: \",metrics.accuracy_score(y_test, y_pred_class))\r\nprint (\"KNN Confusion Matrix: \",metrics.confusion_matrix(y_test, y_pred_class))\r\n\r\n'''\r\nStep 3: Run KNN with Test Cross Validation\r\n'''\r\nfrom sklearn.model_selection import KFold, cross_val_score\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nfrom sklearn import metrics\r\nimport matplotlib.pyplot as plt\r\n\r\nknn = KNeighborsClassifier(n_neighbors=10)\r\nscores = 
cross_val_score(knn, X, y, cv=10, scoring='accuracy') #cv is the cross-validation parameter\r\nprint (\"The average score : \" , scores.mean())\r\nprint (\"KNN Score Accurancy Score: \",metrics.accuracy_score(y_test, y_pred_class))\r\n\r\n# Evaluate the accuracy score of KNN model \r\nfrom sklearn.metrics import confusion_matrix, classification_report\r\nKNN_confusion = metrics.confusion_matrix(y_test, y_pred_class)\r\nprint KNN_confusion\r\nprint(\"Confusion Matrix\", classification_report(y_test, y_pred_class))\r\n\r\n# save confusion matrix and slice into four pieces - KNN Model\r\nKNN_TP = KNN_confusion[1][1]\r\nKNN_TN = KNN_confusion[0][0]\r\nKNN_FP = KNN_confusion[0][1]\r\nKNN_FN = KNN_confusion[1][0] \r\n\r\nprint 'True Positives:', KNN_TP\r\nprint 'True Negatives:', KNN_TN\r\nprint 'False Positives:',KNN_FP\r\nprint 'False Negatives:',KNN_FN\r\n\r\n# Plot the confusion matrix for KNN Model\r\nlabels = ['3','5','7','8']\r\nconf_m1 = metrics.confusion_matrix(y_test, y_pred_class)\r\nprint (\"Confusion Matrix: \",conf_m1)\r\nfig = plt.figure()\r\nax = fig.add_subplot(111)\r\ncax = ax.matshow(conf_m1)\r\nplt.title('Confusion matrix of the classifier')\r\nfig.colorbar(cax)\r\nax.set_xticklabels([''] + labels)\r\nax.set_yticklabels([''] + labels)\r\nplt.xlabel('Predicted')\r\nplt.ylabel('True')\r\nplt.show()\r\n\r\n# NAIVE BAY MODEL\r\nfrom sklearn.naive_bayes import GaussianNB\r\nnb = GaussianNB()\r\nGaussianNB()\r\nfrom sklearn.model_selection import cross_val_score\r\nscores = cross_val_score(nb, X,y, cv=2, scoring = 'accuracy')\r\nprint(scores).mean()\r\n\r\nfrom sklearn.model_selection import train_test_split #import the model\r\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=99)\r\nnb.fit(X_train, y_train)\r\ny_pred_class = nb.predict(X_test)\r\n\r\n# Print score, confusion matrix, classification report for Naive Bay model\r\nfrom sklearn.metrics import confusion_matrix, classification_report\r\nprint (\"NB Score Accurancy Score: 
\",metrics.accuracy_score(y_test, y_pred_class))\r\nprint (\"NB Confusion Matrix: \",metrics.confusion_matrix(y_test, y_pred_class))\r\nprint('\\n')\r\nprint(\"Confusion Matrix\", classification_report(y_test, y_pred_class))\r\n\r\n# save confusion matrix and slice into four pieces - Naive Bay Model\r\nNB_confusion = metrics.confusion_matrix(y_test, y_pred_class)\r\nNB_TP = NB_confusion[1][1]\r\nNB_TN = NB_confusion[0][0]\r\nNB_FP = NB_confusion[0][1]\r\nNB_FN = NB_confusion[1][0] \r\n\r\nprint 'True Positives:', NB_TP\r\nprint 'True Negatives:', NB_TN\r\nprint 'False Positives:',NB_FP\r\nprint 'False Negatives:',NB_FN\r\n\r\n# LOGISTIC MODEL\r\nfrom sklearn.linear_model import LogisticRegression\r\nlogreg = LogisticRegression(C=1e9)\r\nlogreg.fit(X_train, y_train) #Fit data on Logistic regression model\r\nzip(feature_cols, logreg.coef_[0]) #calculate the degree of correlation\r\n\r\n# calculate classification accuracy\r\nfrom sklearn import metrics\r\nl_pred_class = logreg.predict(X_test) #predicted probabilities\r\nprint metrics.accuracy_score(y_test, l_pred_class)\r\n\r\n# print confusion matrix for Logistic Regression\r\nprint metrics.confusion_matrix(y_test, l_pred_class)\r\nprint(\"Confusion Matrix\", classification_report(y_test, l_pred_class))\r\n\r\n# save confusion matrix and slice into four pieces\r\nlog_confusion = metrics.confusion_matrix(y_test, l_pred_class)\r\nlog_TP = log_confusion[1][1]\r\nlog_TN = log_confusion[0][0]\r\nlog_FP = log_confusion[0][1]\r\nlog_FN = log_confusion[1][0] \r\n\r\nprint 'True Positives:', log_TP\r\nprint 'True Negatives:', log_TN\r\nprint 'False Positives:', log_FP\r\nprint 'False Negatives:', log_FN"
},
{
"alpha_fraction": 0.6818376779556274,
"alphanum_fraction": 0.7170459628105164,
"avg_line_length": 32.761192321777344,
"blob_id": "637c5e93cbe6415b54ebb6f47b54ae54620a53c6",
"content_id": "156cb2657575e3e9067c26264a593224d35de08b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2329,
"license_type": "no_license",
"max_line_length": 209,
"num_lines": 67,
"path": "/Visualizations.py",
"repo_name": "huongp112/Hotel-Review-Analysis",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Apr 12 13:29:15 2019\r\n\r\n@author: Huong Pham\r\n\"\"\"\r\n\r\n# Batch imports of text processing libraries\r\nimport scipy as sp\r\nimport nltk\r\nimport string\r\nglobal string\r\nfrom collections import Counter\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n\r\nimport pandas as pd # Import pandas library\r\n# Import the clean csv file \r\npath = r'C:\\Users\\Huong Pham\\Documents\\Graduate School\\Winter 2019\\Independent Study\\\\'\r\ndata = 'new_hotels.csv' \r\ndf= pd.read_csv(path+data)\r\nprint (df.shape)\r\n\r\n# create a new DataFrame that only contains the rounded numbers of Reviewer Scores\r\ndf1 = df[(df.Reviewer_Score==3) | (df.Reviewer_Score==4) | (df.Reviewer_Score==5) | (df.Reviewer_Score==6) | (df.Reviewer_Score==7) | (df.Reviewer_Score==8) | (df.Reviewer_Score==9) | (df.Reviewer_Score==10)] \r\nprint df1.shape\r\nlist(df1.columns.values)\r\n\r\n# Figure 1. Counts of Hotel Locations\r\nplt.figure(figsize=(15,7))\r\nplt.subplot(121)\r\ntoptrader_imp = df1.Country.value_counts(normalize=True)\r\ntoptrader_imp.head(30).plot(kind='bar', fontsize=10)\r\nplt.title('Hotel Locations', fontsize=15)\r\nplt.xticks(rotation=0)\r\n\r\n# Figure 2. Reviewer Score by Number of Reviewers\r\nfrom matplotlib import rcParams\r\nrcParams.update({'figure.autolayout': True})\r\nplt.style.use('fivethirtyeight')\r\nplt.figure(figsize=(10,7))\r\nplt.hist(df1['Reviewer_Score'],bins=20)\r\nplt.ylabel('Number_Reviewers',fontsize=16)\r\nplt.xlabel('Reviewer Score',fontsize=16)\r\nplt.title('Reviewer Score accross Users',fontsize=16)\r\nplt.axvline(df1['Reviewer_Score'].mean(), color='k', linestyle='dashed', linewidth=1)\r\nplt.savefig('Ratings_user.png')\r\n\r\n\r\n# Figure 3. 
Revier Score by Hotels\r\nplt.figure(figsize=(10,7))\r\nplt.hist(df1['Average_Score'],bins=20)\r\nplt.ylabel('Number Hotels',fontsize=16)\r\nplt.xlabel('Average Score',fontsize=16)\r\nplt.title('Average Score accross Hotels',fontsize=16)\r\nplt.axvline(df1['Average_Score'].mean(), color='k', linestyle='dashed', linewidth=1)\r\nplt.savefig('Ratings_hotel.png')\r\n\r\n# Figure 4. Histogram of Text Legnth Distribution\r\ndf1['text length'] = df1['Review'].apply(len)\r\ndf.head()\r\ng = sns.FacetGrid(data=df1, col='Reviewer_Score')\r\ng.map(plt.hist, 'text length', bins=50)\r\n\r\n# Figure 5. Boxplot of text legnth for each score\r\nsns.boxplot(x='Reviewer_Score', y='text length', data=df1)\r\n"
}
] | 2 |
nogozon/Scope | https://github.com/nogozon/Scope | f706d817c466e313cbf54e7723019b0b74711fb1 | 3e7e1bb7adfcb450a952789d68c9feeb07304e95 | 8c837e715e45e8a7a56fb99ec67d0e8a28b2f8e6 | refs/heads/master | 2021-01-11T01:57:44.640597 | 2016-10-13T19:13:38 | 2016-10-13T19:13:38 | 70,832,263 | 0 | 4 | null | 2016-10-13T17:42:21 | 2016-10-13T18:43:10 | 2016-10-13T19:13:38 | Python | [
{
"alpha_fraction": 0.739130437374115,
"alphanum_fraction": 0.739130437374115,
"avg_line_length": 33.5,
"blob_id": "82d33e40e775ba4ba7b0227cb86412709d37fcf9",
"content_id": "9337c7c27249ccb279ed4ee1f21f18c1c0a01dfc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 138,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 4,
"path": "/hello.py",
"repo_name": "nogozon/Scope",
"src_encoding": "UTF-8",
"text": "print \"Hello Scope\"\nmembers = [\"ioanavalero\",\"miguelython\",\"Neslihancetin\",\"yetkinyilmaz\"]\nfor member in members:\n print \"Hola \", member\n"
}
] | 1 |
Arjun9088/testRepo | https://github.com/Arjun9088/testRepo | f2a6130bd38ecdf4e740bb4811f2e3bef8d4256a | 19fcd14bc326b957cdb3bbb14ace34a18dd79050 | 8dacf9bf5dc470270342328ae8bdf2e15c5003f3 | refs/heads/master | 2023-03-01T13:18:40.704775 | 2021-02-06T15:13:14 | 2021-02-06T15:13:14 | 336,554,966 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.47962382435798645,
"alphanum_fraction": 0.5266457796096802,
"avg_line_length": 13.5,
"blob_id": "7fc416154a49537b08c105c204e7d2e5d1a81fa8",
"content_id": "a86756712443b218e1296c0dc22b8fd7fd05df26",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 319,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 22,
"path": "/pycode.py",
"repo_name": "Arjun9088/testRepo",
"src_encoding": "UTF-8",
"text": "import sys\n\ndef add(a, b):\n return a + b\n\ndef sub(a, b):\n try:\n c = a / b\n return c\n except:\n print(\"Division by zero not possible\")\n sys.exit()\ndef mul(a, b):\n return a * b\n\ndef div(a, b):\n return a / b\n\nprint(add(10,12))\nprint(sub(10,12))\nprint(mul(10,12))\nprint(div(10,0))\n"
}
] | 1 |
ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning | https://github.com/ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning | e6627ddf0a00ee8da813c29d9d57e3027eb4f500 | a2c9fe632fa999e6c01c6bbb42140667ad91c40a | aed2dc2b2dea3561fcf7279df289a24f1558ab73 | refs/heads/master | 2023-02-01T13:58:08.700134 | 2020-12-19T20:41:53 | 2020-12-19T20:41:53 | 322,932,956 | 2 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6297016739845276,
"alphanum_fraction": 0.6439688801765442,
"avg_line_length": 27.054546356201172,
"blob_id": "1df41cb885f920c68781a56420dc12d5f858baf1",
"content_id": "b659255b716ce2c56676aae56b9bbf2f7a8171c6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1542,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 55,
"path": "/Umbrella_Academy_INFO7390_Project/INFO7390_Notebooks/modules/SL_data.py",
"repo_name": "ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning",
"src_encoding": "UTF-8",
"text": "from torch.utils.data import DataLoader\nfrom torch.autograd import Variable\nfrom PIL import Image\nimport torch\nfrom torchvision import transforms\nimport os\n\ndef load_image(path):\n # open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)\n with open(path, 'rb') as f:\n img = Image.open(f)\n return img.convert('RGB')\n\ntransform_driving_image = transforms.Compose([\n transforms.CenterCrop(72),\n transforms.ToTensor(),\n transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),\n])\n\nclass CustomDataset:\n def __init__(self, dataset_path):\n self.images = os.path.join(dataset_path, \"images\")\n with open(os.path.join(dataset_path, \"labels.txt\"), 'r') as f:\n lines = [l.strip().split() for l in f.readlines()]\n lines = [[f, int(label)] for (f, label) in lines]\n self.labels = lines\n self.transform = transform_driving_image\n \n def __len__(self):\n return len(self.labels)\n \n def __getitem__(self, index):\n image_name, label = self.labels[index]\n return self.transform(load_image(os.path.join(self.images, image_name))), torch.LongTensor([label])\n\n\ndef get_dataloader(dataset_path, batch_size):\n dataset = CustomDataset(dataset_path)\n return DataLoader(dataset, batch_size=batch_size, shuffle=True)\n\n\nLEFT = 0\nRIGHT = 1\nGO = 2\nACTIONS = [LEFT, RIGHT, GO]\n\n\n\ndef test():\n loader = CustomDataset(\"dataset2\")\n print(len(loader))\n print(loader[0])\n\nif __name__=='__main__':\n test()"
},
{
"alpha_fraction": 0.738206684589386,
"alphanum_fraction": 0.7493698000907898,
"avg_line_length": 33.25925827026367,
"blob_id": "d9fbc3ffe2781837c4dc06c3ad1e809ff8baead2",
"content_id": "263cae0cdd655adaa883d82d61531b938d43f9b2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2779,
"license_type": "permissive",
"max_line_length": 407,
"num_lines": 81,
"path": "/README.md",
"repo_name": "ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning",
"src_encoding": "UTF-8",
"text": "# Umbrella_Academy\n# Self Driving Car in OpenAI Gym using Imitation Learning and Reinforcement Learning\n## Overview\n\n### Part 1\nDriving requires reacting to a wide variety of complex environment conditions and agent behaviors. Explicitly modeling each possible scenario is unrealistic. In contrast, imitation learning can, in theory, leverage data from large fleets of human-driven cars. In the following project we have tried simulating a beginner’s version of self-driving car using basics of Imitation Learning (Behavioral Cloning).\n\n### Part 2\nIn the following project we implemented deep-q learning model for self driving cars.\n\n\n## Requirements (Libraries)\n- OpenAI Gym\n- Tensorflow=1.15.0-rc3\n- Pytorch=0.3\n- PIL\n- Keras\n- Opencv\n- Skimage\n- Imageio\n- Pyglet\n- Numpy\n- Matplotlib\n\n## Environment Setup\nIn order to begin with this project, we suggest you to make two different virtual environment , one for supervised learning and other for reinforcement learning. This is done so that there is no conflict in version libraries installed. \n\n- Commands for making new virtual environment in Anaconda:-\n\n> - conda create -n yourenvname python=3.7 anaconda\n> - conda activate yourenvname\n> - conda install -c conda-forge nb_conda_kernels\n> - conda install -c conda-forge nb_conda\n> - conda install -c conda-forge ipykernel\n\n\n### For other person to use the environment\nconda env create -f <environment-name>.yml\n\n## Python Files (Mandatory)\nFor your code to run smoothly there are some python scripts in (folder modules) which are imported in notebooks directly. 
In order to understand the models better please go through those scripts.\n\n\n## Folder Structure\n\n- INFO7390_SelfDrivingCar\n- README.md\n- Research Paper\n- INFO7390_FinalProject.ipynb\n- main_videos\n- images_main_notebook\n- autonomous.yml\n- self-driving.yml\n- requirements_SL.txt\n- requirements_RL.txt\n> Umbrella_Academy_INFO7390_Project\n- INFO7390_Notebooks\n - sdc_gym (gym environment)\n - modules (py files)\n - SL_model.py\n - SL_data.py\n - RL_dqn.py\n - RL_car_dqn.py\n - RL_exp_replay.py\n - RL_processingimage.py\n - Basics_of_Convolutional_Neural_Network.ipynb\n - Self Driving Car using Supervised Learning\n - Basics_of_Deep_Q_Learning\n - Self Driving Car using Reinforcement Learning \n - Supervised_IL_train_images\n - Supervised_IL_test_images\n - Supervised_IL_models\n - IL_Videos\n - Images (All images used in project)\n \n\n## How to Run\n- Clone the repository in your local disk.\n- Refer the folder structure mentioned above, and open the file \"INFO7390_FinalProject.ipynb\".\n- This is the master notebook and it links to all the different parts of the project.\n- Please make sure that the libraries mentioned above are all installed\n\n\n"
},
{
"alpha_fraction": 0.4728725850582123,
"alphanum_fraction": 0.7569788098335266,
"avg_line_length": 24.238636016845703,
"blob_id": "3f596696cc77e182f72d976172c73ed592d597a5",
"content_id": "98f652fe5505e2a4e0a7eabdcad9d4ed4a718d4b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 8884,
"license_type": "permissive",
"max_line_length": 55,
"num_lines": 352,
"path": "/requirements_SL.txt",
"repo_name": "ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning",
"src_encoding": "UTF-8",
"text": "# This file may be used to create an environment using:\n# $ conda create --name <env> --file <this file>\n# platform: osx-64\n_anaconda_depends=2020.07=py37_0\nabsl-py=0.11.0=py37hf985489_0\nalabaster=0.7.12=py37_0\nanaconda=custom=py37_1\nanaconda-client=1.7.2=py37_0\nanaconda-project=0.8.4=py_0\napplaunchservices=0.2.1=py_0\nappnope=0.1.0=py37_0\nappscript=1.1.1=py37haf1e3a3_0\nargh=0.26.2=py37_0\nargon2-cffi=20.1.0=py37haf1e3a3_1\nasn1crypto=1.4.0=py_0\nastor=0.8.1=pyh9f0ad1d_0\nastroid=2.4.2=py37_0\nastropy=4.0.2=py37haf1e3a3_0\nasync_generator=1.10=py37h28b3542_0\natomicwrites=1.4.0=py_0\nattrs=20.3.0=pyhd3eb1b0_0\nautopep8=1.5.4=py_0\nbabel=2.8.1=pyhd3eb1b0_0\nbackcall=0.2.0=py_0\nbackports=1.0=py_2\nbackports.shutil_get_terminal_size=1.0.0=py37_2\nbeautifulsoup4=4.9.3=pyhb0f4dca_0\nbitarray=1.6.1=py37h9ed2024_0\nbkcharts=0.2=py37_0\nblas=1.0=mkl\nbleach=3.2.1=py_0\nblosc=1.20.1=hab81aa3_0\nbokeh=2.2.3=py37_0\nboto=2.49.0=py37_0\nbottleneck=1.3.2=py37hf1fa96c_1\nbox2d-py=2.3.8=pypi_0\nbrotlipy=0.7.0=py37haf1e3a3_1000\nbzip2=1.0.8=h1de35cc_0\nc-ares=1.17.1=hc929b4f_0\nca-certificates=2020.12.8=hecd8cb5_0\ncairo=1.16.0=h0ab9d94_1001\ncertifi=2020.12.5=py37hecd8cb5_0\ncffi=1.14.3=py37hed5b41f_0\nchardet=3.0.4=py37_1003\nclick=7.1.2=py_0\ncloudpickle=1.6.0=py_0\nclyent=1.2.2=py37_1\ncolorama=0.4.4=py_0\ncontextlib2=0.6.0.post1=py_0\ncryptography=3.1.1=py37hddc9c9b_0\ncurl=7.71.1=hb0a8c7a_1\ncycler=0.10.0=py37_0\ncython=0.29.21=py37hb1e8313_0\ncytoolz=0.11.0=py37haf1e3a3_0\ndask=2.30.0=py_0\ndask-core=2.30.0=py_0\ndbus=1.13.18=h18a8e69_0\ndecorator=4.4.2=py_0\ndefusedxml=0.6.0=py_0\ndiff-match-patch=20200713=py_0\ndistributed=2.30.1=py37hecd8cb5_0\ndlib=19.20=py37h801b7fd_1\ndocutils=0.16=py37_1\nentrypoints=0.3=py37_0\net_xmlfile=1.0.1=py_1001\nexpat=2.2.10=hb1e8313_2\nfastcache=1.1.0=py37h1de35cc_0\nffmpeg=4.1.3=h5c2b479_0\nfilelock=3.0.12=py_0\nflake8=3.8.4=py_0\nflask=1.1.2=py_0\nfontconfig=2.13.1=h1027ab8_1000\nfreetype=2.10.4=ha233b18_0\nfsspec=0.8.
3=py_0\nfuture=0.18.2=py37_1\ngast=0.4.0=pyh9f0ad1d_0\nget_terminal_size=1.0.0=h7520d66_0\ngettext=0.19.8.1=hb0f4f8b_2\ngevent=20.9.0=py37haf1e3a3_0\ngiflib=5.2.1=hbcb3906_2\nglib=2.66.1=h9bbe63b_0\nglob2=0.7=py_0\ngmp=6.1.2=hb37e062_1\ngmpy2=2.0.8=py37h6ef4df4_2\ngnutls=3.6.13=hc269f14_0\ngoogle-pasta=0.2.0=pyh8c360ce_0\ngraphite2=1.3.13=h12caacf_1001\ngreenlet=0.4.17=py37haf1e3a3_0\ngrpcio=1.30.0=py37hf940fad_0\ngym=0.10.8=dev_0\nh5py=2.10.0=py37h3134771_0\nharfbuzz=2.4.0=h92b87b8_1\nhdf5=1.10.4=hfa1e0ec_0\nheapdict=1.0.1=py_0\nhtml5lib=1.1=py_0\nicu=58.2=h0a44026_3\nidna=2.10=py_0\nimageio=2.9.0=py_0\nimagesize=1.2.0=py_0\nimportlib-metadata=2.0.0=py_1\nimportlib_metadata=2.0.0=1\niniconfig=1.1.1=py_0\nintel-openmp=2019.4=233\nintervaltree=3.1.0=py_0\nipykernel=5.3.4=py37h5ca1d4c_0\nipython=7.19.0=py37h01d92e1_0\nipython_genutils=0.2.0=py37_0\nipywidgets=7.5.1=py_1\nisort=5.6.4=py_0\nitsdangerous=1.1.0=py37_0\njasper=1.900.1=h1f36771_4\njbig=2.1=h4d881f8_0\njdcal=1.4.1=py_0\njedi=0.17.1=py37_0\njinja2=2.11.2=py_0\njoblib=0.17.0=py_0\njpeg=9d=hbcb3906_0\njson5=0.9.5=py_0\njsonschema=3.2.0=py_2\njupyter=1.0.0=py37_7\njupyter_client=6.1.7=py_0\njupyter_console=6.2.0=py_0\njupyter_core=4.6.3=py37_0\njupyterlab=2.2.6=py_0\njupyterlab_pygments=0.1.2=py_0\njupyterlab_server=1.2.0=py_0\nkeras-applications=1.0.8=py_1\nkeras-preprocessing=1.1.0=py_0\nkeyring=21.4.0=py37_1\nkiwisolver=1.3.0=py37h23ab428_0\nkrb5=1.18.2=h75d18d8_0\nlame=3.100=h1de35cc_1001\nlazy-object-proxy=1.4.3=py37h1de35cc_0\nlcms2=2.11=h92f6f08_0\nlibarchive=3.4.2=haa3ed63_0\nlibblas=3.8.0=14_mkl\nlibcblas=3.8.0=14_mkl\nlibcurl=7.71.1=h8a08a2b_1\nlibcxx=10.0.0=1\nlibedit=3.1.20191231=h1de35cc_1\nlibffi=3.3=hb1e8313_2\nlibgfortran=3.0.1=h93005f0_2\nlibiconv=1.16=h1de35cc_0\nliblapack=3.8.0=14_mkl\nliblapacke=3.8.0=14_mkl\nliblief=0.10.1=h0a44026_0\nlibllvm10=10.0.1=h76017ad_5\nlibllvm9=9.0.1=h7475705_1\nlibpng=1.6.37=ha441bb4_0\nlibprotobuf=3.12.3=hab81aa3_2\nlibsodium=1.0.18=h1de35cc_0\nlibspatialindex=1
.9.3=h0a44026_0\nlibssh2=1.9.0=ha12b0ac_1\nlibtiff=4.1.0=hcb84e12_1\nlibuv=1.40.0=haf1e3a3_0\nlibwebp=1.0.2=hd3bf737_5\nlibxml2=2.9.10=h7cdb67c_3\nlibxslt=1.1.34=h83b36ba_0\nllvm-openmp=10.0.0=h28b9765_0\nllvmlite=0.34.0=py37h739e7dc_4\nlocket=0.2.0=py37_1\nlxml=4.6.1=py37h63b7cb6_0\nlz4-c=1.9.2=h79c402e_3\nlzo=2.10=haf1e3a3_2\nmarkdown=3.3.3=pyh9f0ad1d_0\nmarkupsafe=1.1.1=py37h1de35cc_0\nmatplotlib=3.3.2=0\nmatplotlib-base=3.3.2=py37h181983e_0\nmccabe=0.6.1=py37_1\nmistune=0.8.4=py37h1de35cc_0\nmkl=2019.4=233\nmkl-service=2.3.0=py37hfbe908c_0\nmkl_fft=1.2.0=py37hc64f4ea_0\nmkl_random=1.1.1=py37h959d312_0\nmock=4.0.2=py_0\nmore-itertools=8.6.0=pyhd3eb1b0_0\nmpc=1.1.0=h6ef4df4_1\nmpfr=4.0.2=h9066e36_1\nmpmath=1.1.0=py37_0\nmsgpack-python=1.0.0=py37h04f5b5a_1\nmultipledispatch=0.6.0=py37_0\nnbclient=0.5.1=py_0\nnbconvert=6.0.7=py37_0\nnbformat=5.0.8=py_0\nncurses=6.2=h0a44026_1\nnest-asyncio=1.4.2=pyhd3eb1b0_0\nnettle=3.4.1=h3efe00b_1002\nnetworkx=2.5=py_0\nninja=1.10.2=py37hf7b0b51_0\nnltk=3.5=py_0\nnose=1.3.7=py37_2\nnotebook=6.1.4=py37_0\nnumba=0.51.2=py37h959d312_1\nnumexpr=2.7.1=py37hce01a72_0\nnumpy=1.19.2=py37h456fd55_0\nnumpy-base=1.19.2=py37hcfb5961_0\nnumpydoc=1.1.0=pyhd3eb1b0_1\nolefile=0.46=py37_0\nopenblas=0.2.19=2\nopencv=4.1.0=py37h3012f30_4\nopenh264=1.8.0=hd9629dc_1000\nopenpyxl=3.0.5=py_0\nopenssl=1.1.1i=h9ed2024_0\npackaging=20.4=py_0\npandas=1.1.3=py37hb1e8313_0\npandoc=2.11=h0dc7051_0\npandocfilters=1.4.3=py37hecd8cb5_1\nparso=0.7.0=py_0\npartd=1.1.0=py_0\npath=15.0.0=py37_0\npath.py=12.5.0=0\npathlib2=2.3.5=py37_0\npathtools=0.1.2=py_1\npatsy=0.5.1=py37_0\npcre=8.44=hb1e8313_0\npep8=1.7.1=py37_0\npexpect=4.8.0=py37_0\npickleshare=0.7.5=py37_0\npillow=8.0.1=py37h5270095_0\npip=20.2.4=py37hecd8cb5_0\npixman=0.38.0=h01d97ff_1003\npkginfo=1.6.1=py37hecd8cb5_0\npluggy=0.13.1=py37_0\nply=3.11=py37_0\nprometheus_client=0.8.0=py_0\nprompt-toolkit=3.0.8=py_0\nprompt_toolkit=3.0.8=0\nprotobuf=3.12.3=py37h570ac47_0\npsutil=5.7.2=py37haf1e3a3_0\nptyprocess=0
.6.0=py37_0\npy=1.9.0=py_0\npy-lief=0.10.1=py37haf313ee_0\npycodestyle=2.6.0=py_0\npycosat=0.6.3=py37h1de35cc_0\npycparser=2.20=py_2\npycrypto=2.6.1=py37haf1e3a3_10\npycurl=7.43.0.6=py37hddc9c9b_0\npydocstyle=5.1.1=py_0\npyflakes=2.2.0=py_0\npyglet=1.5.11=pypi_0\npygments=2.7.2=pyhd3eb1b0_0\npylint=2.6.0=py37_0\npyodbc=4.0.30=py37h0a44026_0\npyopenssl=19.1.0=py_1\npyparsing=2.4.7=py_0\npyqt=5.9.2=py37h655552a_2\npyrsistent=0.17.3=py37haf1e3a3_0\npysocks=1.7.1=py37_0\npytables=3.6.1=py37h5bccee9_0\npytest=6.1.1=py37_0\npython=3.7.9=h26836e1_0\npython-dateutil=2.8.1=py_0\npython-jsonrpc-server=0.4.0=py_0\npython-language-server=0.35.1=py_0\npython-libarchive-c=2.9=py_0\npython.app=2=py37_10\npython_abi=3.7=1_cp37m\npytorch=1.7.0=py3.7_0\npytz=2020.1=py_0\npywavelets=1.1.1=py37haf1e3a3_2\npyyaml=5.3.1=py37haf1e3a3_1\npyzmq=19.0.2=py37hb1e8313_1\nqdarkstyle=2.8.1=py_0\nqt=5.9.7=h468cd18_1\nqtawesome=1.0.1=py_0\nqtconsole=4.7.7=py_0\nqtpy=1.9.0=py_0\nreadline=8.0=h1de35cc_0\nregex=2020.10.15=py37haf1e3a3_0\nrequests=2.24.0=py_0\nripgrep=12.1.1=0\nrope=0.18.0=py_0\nrtree=0.9.4=py37_1\nruamel_yaml=0.15.87=py37haf1e3a3_1\nscikit-image=0.17.2=py37h81aa140_0\nscikit-learn=0.23.2=py37h959d312_0\nscipy=1.5.2=py37h912ce22_0\nseaborn=0.11.0=py_0\nsend2trash=1.5.0=py37_0\nsetuptools=50.3.1=py37hecd8cb5_1\nsimplegeneric=0.8.1=py37_2\nsingledispatch=3.4.0.3=py_1001\nsip=4.19.8=py37h0a44026_0\nsix=1.15.0=py37hecd8cb5_0\nsnappy=1.1.8=h4a8c4bd_3\nsnowballstemmer=2.0.0=py_0\nsortedcollections=1.2.1=py_0\nsortedcontainers=2.2.2=py_0\nsoupsieve=2.0.1=py_0\nsphinx=3.2.1=py_0\nsphinxcontrib=1.0=py37_1\nsphinxcontrib-applehelp=1.0.2=py_0\nsphinxcontrib-devhelp=1.0.2=py_0\nsphinxcontrib-htmlhelp=1.0.3=py_0\nsphinxcontrib-jsmath=1.0.1=py_0\nsphinxcontrib-qthelp=1.0.3=py_0\nsphinxcontrib-serializinghtml=1.1.4=py_0\nsphinxcontrib-websupport=1.2.4=py_0\nspyder=4.1.5=py37_0\nspyder-kernels=1.9.4=py37_0\nsqlalchemy=1.3.20=py37h9ed2024_0\nsqlite=3.33.0=hffcf06c_0\nstatsmodels=0.12.0=py37haf1e3a3_0\n
sympy=1.6.2=py37hecd8cb5_1\ntbb=2019.9=ha1b3eb9_1\ntblib=1.7.0=py_0\ntensorboard=1.14.0=py37_0\ntensorflow=1.14.0=h3cdfc77_0\ntensorflow-base=1.14.0=py37hc8dfbb8_0\ntensorflow-estimator=1.14.0=py37h5ca1d4c_0\ntermcolor=1.1.0=py_2\nterminado=0.9.1=py37_0\ntestpath=0.4.4=py_0\nthreadpoolctl=2.1.0=pyh5ca1d4c_0\ntifffile=2020.10.1=py37h0cf3a3e_2\ntk=8.6.10=hb0a8c7a_0\ntoml=0.10.1=py_0\ntoolz=0.11.1=py_0\ntorchvision=0.8.1=py37_cpu\ntornado=6.0.4=py37h1de35cc_1\ntqdm=4.50.2=py_0\ntraitlets=5.0.5=py_0\ntyped-ast=1.4.1=py37h1de35cc_0\ntyping_extensions=3.7.4.3=py_0\nujson=4.0.1=py37hb1e8313_0\nunicodecsv=0.14.1=py37_0\nunixodbc=2.3.9=haf1e3a3_0\nurllib3=1.25.11=py_0\nwatchdog=0.10.3=py37haf1e3a3_0\nwcwidth=0.2.5=py_0\nwebencodings=0.5.1=py37_1\nwerkzeug=1.0.1=py_0\nwheel=0.35.1=py_0\nwidgetsnbextension=3.5.1=py37_0\nwrapt=1.11.2=py37h1de35cc_0\nwurlitzer=2.0.1=py37_0\nx264=1!152.20180806=h1de35cc_0\nxlrd=1.2.0=py37_0\nxlsxwriter=1.3.7=py_0\nxlwings=0.20.8=py37_0\nxlwt=1.3.0=py37_0\nxz=5.2.5=h1de35cc_0\nyaml=0.2.5=haf1e3a3_0\nyapf=0.30.0=py_0\nzeromq=4.3.3=hb1e8313_3\nzict=2.0.0=py_0\nzipp=3.4.0=pyhd3eb1b0_0\nzlib=1.2.11=h1de35cc_3\nzope=1.0=py37_1\nzope.event=4.5.0=py37_0\nzope.interface=5.1.2=py37haf1e3a3_0\nzstd=1.4.5=h41d2c2f_0\n"
},
{
"alpha_fraction": 0.5706276893615723,
"alphanum_fraction": 0.5877217054367065,
"avg_line_length": 39.29999923706055,
"blob_id": "c49e5946b39330df57f666f93fd849e3fea8b952",
"content_id": "e395bf7641024608e95028c5105caf2df59f3a17",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10881,
"license_type": "permissive",
"max_line_length": 167,
"num_lines": 270,
"path": "/Umbrella_Academy_INFO7390_Project/INFO7390_Notebooks/modules/RL_dqn.py",
"repo_name": "ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning",
"src_encoding": "UTF-8",
"text": "from __future__ import generator_stop\nfrom modules.RL_exp_replay import ExperienceReplay\nimport numpy as np\n#from tensorflow.python.compiler.tensorrt import trt\nimport tensorflow.contrib.slim as slim\nimport tensorflow as tf\nimport re\nfrom modules.RL_processimage import processimage\n\n\nclass DQN:\n\n def __init__(self,\n env,\n batchsize=64,\n pic_size=(96, 96),\n num_frame_stack=3,\n gamma=0.95,\n frame_skip=3,\n train_freq=3,\n initial_epsilon=1,\n min_epsilon=0.05,\n render=False,\n epsilon_decay_steps=int(100000),\n min_experience_size=int(1000),\n experience_capacity=int(100000),\n target_network_update_freq=1000,\n regularization = 1e-6,\n optimizer_params = None,\n action_map=None\n ):\n self.exp_history = ExperienceReplay(\n num_frame_stack,\n capacity=experience_capacity,\n pic_size=pic_size\n )\n\n # in playing mode we don't store the experience to agent history\n # but this cache is still needed to get the current frame stack\n self.playing_cache = ExperienceReplay(\n num_frame_stack,\n capacity=num_frame_stack * 5 + 10,\n pic_size=pic_size\n )\n\n if action_map is not None:\n self.dim_actions = len(action_map)\n else:\n self.dim_actions = env.action_space.n\n\n self.target_network_update_freq = target_network_update_freq\n self.action_map = action_map\n self.env = env\n self.batchsize = batchsize\n self.num_frame_stack = num_frame_stack\n self.gamma = gamma\n self.frame_skip = frame_skip\n self.train_freq = train_freq\n self.initial_epsilon = initial_epsilon\n self.min_epsilon = min_epsilon\n self.epsilon_decay_steps = epsilon_decay_steps\n self.render = render\n self.min_experience_size = min_experience_size\n self.pic_size = pic_size\n self.regularization = regularization\n # These default magic values always work with Adam\n self.global_step = tf.Variable(0, trainable=False)\n self.increment_global_step_op = tf.assign(self.global_step, self.global_step+1)\n self.decayed_lr = tf.train.exponential_decay(0.001, self.global_step, 200000, 
0.7, staircase=False)\n lr = self.decayed_lr\n # lr = 0.001\n self.optimizer_params = optimizer_params or dict(learning_rate=lr, epsilon=1e-7)\n\n self.do_training = True\n self.playing_epsilon = 0.0\n self.session = None\n\n self.state_size = (self.num_frame_stack,) + self.pic_size\n self.global_counter = 0\n self.episode_counter = 0\n\n def build_graph(self):\n input_dim_general = (None, self.pic_size[0], self.pic_size[1], self.num_frame_stack) # (None, 4, 96, 96) changed to (None, 96, 96, 4)\n input_dim_with_batch = (self.batchsize, self.pic_size[0], self.pic_size[1], self.num_frame_stack) #Input dimensions: (64, 4, 96, 96) changed to (64, 96, 96, 4)\n\n self.input_prev_state = tf.compat.v1.placeholder(tf.float32, input_dim_general, \"prev_state\")\n self.input_next_state = tf.compat.v1.placeholder(tf.float32, input_dim_with_batch, \"next_state\")\n self.input_reward = tf.compat.v1.placeholder(tf.float32, self.batchsize, \"reward\")\n self.input_actions = tf.compat.v1.placeholder(tf.int32, self.batchsize, \"actions\")\n self.input_done_mask = tf.compat.v1.placeholder(tf.int32, self.batchsize, \"done_mask\")\n\n # The target Q-values come from the fixed network\n with tf.compat.v1.variable_scope(\"fixed\"): #64 96 96 3\n # Create target network which is gonna be fixed and updated every C parameters\n qsa_targets = self.create_network(self.input_next_state, trainable=False)\n\n with tf.compat.v1.variable_scope(\"train\"): # ? 
96 96 3\n # Create Prediction/Estimate network which will be trained/updated every 3 frames\n # Create Prediction/Estimate network which will be trained/updated every 3 frames\n qsa_estimates = self.create_network(self.input_prev_state, trainable=True)\n\n self.best_action = tf.argmax(qsa_estimates, axis=1)\n\n not_done = tf.cast(tf.logical_not(tf.cast(self.input_done_mask, \"bool\")), \"float32\")\n # select the chosen action from each row\n # in numpy this is qsa_estimates[range(batchsize), self.input_actions]\n action_slice = tf.stack([tf.range(0, self.batchsize), self.input_actions], axis=1)\n #\n q_estimates_for_input_action = tf.gather_nd(qsa_estimates, action_slice)\n\n #Taken from paper : Loss = [(r + gamma*max Qtarget)-(Q estimate)^2]\n q_target = tf.reduce_max(qsa_targets, -1) * self.gamma * not_done + self.input_reward\n training_loss = tf.nn.l2_loss(q_target - q_estimates_for_input_action) / self.batchsize\n\n # reg_loss = tf.add_n(tf.losses.get_regularization_losses())\n reg_loss = [0]\n\n #Adam optimizer\n\t\t#optimizer = tf.train.AdamOptimizer(**(self.optimizer_params))\n optimizer = tf.compat.v1.train.AdamOptimizer(**(self.optimizer_params))\n #Adadelta optimizer:\n # optimizer = tf.train.RMSPropOptimizer(**(self.optimizer_params))\n\n self.train_op = optimizer.minimize(reg_loss + training_loss)\n\n train_params = self.get_variables(\"train\")\n fixed_params = self.get_variables(\"fixed\")\n\n\n assert (len(train_params) == len(fixed_params))\n self.copy_network_ops = [tf.assign(fixed_v, train_v) for train_v, fixed_v in zip(train_params, fixed_params)]\n\n def get_variables(self, scope):\n vars = [t for t in tf.compat.v1.global_variables()\n if \"%s/\" % scope in t.name and \"Adam\" not in t.name]\n return sorted(vars, key=lambda v: v.name)\n\n def create_network(self, input, trainable):\n if trainable:\n # wr = None\n wr = tf.compat.v1.keras.regularizers.l2(l=self.regularization)\n else:\n wr = None\n\n net = tf.layers.conv2d(inputs=input, 
filters=8, kernel_size=(7,7), strides=4, name='conv1', kernel_regularizer=wr)\n net = tf.nn.relu(net)\n net = tf.nn.max_pool2d(net, ksize=2, strides=2, padding='SAME')\n net = tf.layers.conv2d(inputs=net, filters=16, kernel_size=(3, 3), strides=1, name='conv2',\n kernel_regularizer=wr)\n net = tf.nn.relu(net)\n net = tf.nn.max_pool2d(net, ksize=2, strides=2, padding='SAME')\n net = tf.layers.flatten(net)\n net = tf.layers.dense(net, 400, activation=tf.nn.relu, kernel_regularizer=wr)\n # net = tf.layers.dropout(net, 0.5)\n q_state_action_values = tf.layers.dense(net, self.dim_actions, activation=None, kernel_regularizer=wr)\n\n return q_state_action_values\n\n # def check_early_stop(self, reward, totalreward):\n # return False, 0.0\n\n def get_random_action(self):\n return np.random.choice(self.dim_actions)\n\n def get_epsilon(self):\n if not self.do_training:\n return self.playing_epsilon\n elif self.global_counter >= self.epsilon_decay_steps:\n return self.min_epsilon\n else:\n # linear decay\n r = 1.0 - self.global_counter / float(self.epsilon_decay_steps)\n return self.min_epsilon + (self.initial_epsilon - self.min_epsilon) * r\n\n def train(self):\n batch = self.exp_history.sample_mini_batch(self.batchsize)\n # Feed dict\n fd = {\n self.input_reward: \"reward\",\n self.input_prev_state: \"prev_state\",\n self.input_next_state: \"next_state\",\n self.input_actions: \"actions\",\n self.input_done_mask: \"done_mask\"\n }\n fd1 = {ph: batch[k] for ph, k in fd.items()}\n self.session.run([self.train_op], fd1)\n\n def play_episode(self, render, load_checkpoint):\n eh = (\n self.exp_history if self.do_training\n else self.playing_cache\n )\n total_reward = 0\n total_score = 0\n frames_in_episode = 0\n\n first_frame = self.env.reset()\n first_frame_pp = processimage.process_image(first_frame)\n\n eh.start_new_episode(first_frame_pp)\n\n epsilon = self.get_epsilon()\n while True:\n if np.random.rand() > epsilon and not load_checkpoint:\n action_idx = self.session.run(\n 
self.best_action,\n {self.input_prev_state: eh.current_state()[np.newaxis, ...]}\n )[0]\n elif not load_checkpoint:\n action_idx = self.get_random_action()\n elif load_checkpoint:\n action_idx = self.session.run(\n self.best_action,\n {self.input_prev_state: eh.current_state()[np.newaxis, ...]}\n )[0]\n\n if self.action_map is not None:\n action = self.action_map[action_idx]\n else:\n action = action_idx\n\n reward = 0\n score = 0\n for _ in range(self.frame_skip):\n observation, r, done, info = self.env.step(action)\n if render:\n self.env.render()\n\n\n score += r\n #Increase rewards on the last frames if reward is positive\n if r > 0:\n r = r + frames_in_episode*0.2 #in 230 frames late game it adds +- 50 reward to tiles\n reward += r\n\n if done:\n break\n\n early_done, punishment = self.check_early_stop(reward, total_reward, frames_in_episode)\n if early_done:\n reward += punishment\n\n done = done or early_done\n\n total_reward += reward\n total_score += score\n frames_in_episode += 1\n observation = processimage.process_image(observation)\n eh.add_experience(observation, action_idx, done, reward)\n\n if self.do_training:\n self.global_counter += 1\n step = self.session.run(self.increment_global_step_op)\n if self.global_counter % self.target_network_update_freq:\n self.update_target_network()\n train_cond = (\n self.exp_history.counter >= self.min_experience_size and\n self.global_counter % self.train_freq == 0\n )\n if train_cond:\n self.train()\n\n if done:\n if self.do_training:\n self.episode_counter += 1\n\n return total_score, total_reward, frames_in_episode, epsilon\n\n def update_target_network(self):\n self.session.run(self.copy_network_ops)\n"
},
{
"alpha_fraction": 0.5290468335151672,
"alphanum_fraction": 0.564015805721283,
"avg_line_length": 29.586206436157227,
"blob_id": "70990c49cea3ce37f9e7f302244db12de1010320",
"content_id": "973576063bce401f6e1e5c7cee50081130257d7c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1773,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 58,
"path": "/Umbrella_Academy_INFO7390_Project/INFO7390_Notebooks/modules/RL_car_dqn.py",
"repo_name": "ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning",
"src_encoding": "UTF-8",
"text": "from modules.RL_dqn import DQN\nimport numpy as np\nfrom skimage import color\nimport itertools as it\n\n\n\nclass CarRacingDQN(DQN):\n #CarRacing specific part of the DQN-agent\n\n\n # ** is used for unpacking the model configurations\n def __init__(self, max_negative_rewards=100, **model_config):\n\n #Define all 12 actions possible:\n # all_actions = np.array([k for k in it.product([-1, 0, 1], [1, 0], [0.5, 0])])\n\n #selected 5 actions:\n all_actions = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, 0.5], [0, 0, 0],[1, 0, 0]])\n\n #Set self parameters\n super().__init__(\n action_map=all_actions,\n pic_size=(96, 96),\n **model_config\n )\n\n self.gas_actions = np.array([a[1] == 1 and a[2] == 0 for a in all_actions])\n self.break_actions = np.array([a[2] > 0 for a in all_actions])\n self.n_gas_actions = self.gas_actions.sum()\n self.neg_reward_counter = 0\n self.max_neg_rewards = max_negative_rewards\n\n\n\n def get_random_action(self):\n# give priority to acceleration actions\n action_weights = 14.0 * self.gas_actions + 1.0\n action_weights /= np.sum(action_weights)\n\n return np.random.choice(self.dim_actions, p=action_weights)\n\n def check_early_stop(self, reward, totalreward, fie):\n if reward < 0 and fie > 10:\n self.neg_reward_counter += 1\n done = (self.neg_reward_counter > self.max_neg_rewards)\n\n if done and totalreward <= 500:\n punishment = -20.0\n else:\n punishment = 0.0\n if done:\n self.neg_reward_counter = 0\n\n return done, punishment\n else:\n self.neg_reward_counter = 0\n return False, 0.0"
},
{
"alpha_fraction": 0.5385638475418091,
"alphanum_fraction": 0.605053186416626,
"avg_line_length": 29.1200008392334,
"blob_id": "455de2afcffa2e923968df8963b1d37dbc604a46",
"content_id": "fb97ca98f7032c45389209e9c7d0da3b7a61fb0e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 752,
"license_type": "permissive",
"max_line_length": 60,
"num_lines": 25,
"path": "/Umbrella_Academy_INFO7390_Project/INFO7390_Notebooks/modules/RL_processimage.py",
"repo_name": "ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning",
"src_encoding": "UTF-8",
"text": "import matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\nimport cv2\nimport numpy as np\nfrom skimage import color, transform, io\n\n\nclass processimage:\n def process_image(obs):\n #uncomment to see original image\n # plt.imshow(obs)\n # plt.show()\n\n obs1 = obs.astype(np.uint8)\n obs_gray = color.rgb2gray(obs1)\n # obs_gray[abs(obs_gray - 0.60116) < 0.1] = 1\n obs_gray[84:95,0:12] = 0\n obs_gray[abs(obs_gray - 0.68616) < 0.0001] = 1\n obs_gray[abs(obs_gray - 0.75630) < 0.0001] = 1\n #uncomment to see pre processed image\n # plt.imshow(obs_gray, cmap='gray')\n # plt.show()\n\n #Set values between -1 and 1 for input normalization\n return 2 * obs_gray - 1"
},
{
"alpha_fraction": 0.4809111952781677,
"alphanum_fraction": 0.7597553133964539,
"avg_line_length": 24.76630401611328,
"blob_id": "9fc1bdc2a61a9e8a10f9fae522611216a1e90393",
"content_id": "06b3a143ef066398c4f53690c670c32af2092d01",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 4741,
"license_type": "permissive",
"max_line_length": 55,
"num_lines": 184,
"path": "/requirements_RL.txt",
"repo_name": "ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning",
"src_encoding": "UTF-8",
"text": "# This file may be used to create an environment using:\n# $ conda create --name <env> --file <this file>\n# platform: osx-64\nabsl-py=0.11.0=py36h79c6626_0\nappnope=0.1.2=py36hecd8cb5_1001\nargon2-cffi=20.1.0=py36haf1e3a3_1\nastor=0.8.1=pyh9f0ad1d_0\nastunparse=1.6.3=pypi_0\nasync_generator=1.10=py36h28b3542_0\nattrs=20.3.0=pyhd3eb1b0_0\nbackcall=0.2.0=py_0\nblas=1.0=mkl\nbleach=3.2.1=py_0\nbox2d-py=2.3.8=pypi_0\nbzip2=1.0.8=h1de35cc_0\nc-ares=1.17.1=hc929b4f_0\nca-certificates=2020.12.5=h033912b_0\ncachetools=4.2.0=pypi_0\ncairo=1.14.12=hc4e6be7_4\ncertifi=2020.12.5=py36h79c6626_0\ncffi=1.14.4=py36h2125817_0\nchardet=4.0.0=pypi_0\ncloudpickle=1.6.0=pypi_0\ncycler=0.10.0=py_2\ndataclasses=0.7=py36_0\ndecorator=4.4.2=py_0\ndefusedxml=0.6.0=py_0\nentrypoints=0.3=py36_0\nffmpeg=4.2.2=h97e5cf8_0\nflatbuffers=1.12=pypi_0\nfontconfig=2.13.0=h5d5b041_1\nfreetype=2.10.4=h3f75d11_0\nfuture=0.18.2=pypi_0\ngast=0.2.2=pypi_0\ngettext=0.19.8.1=hb0f4f8b_2\ngiflib=5.2.1=haf1e3a3_0\nglib=2.66.1=h9bbe63b_0\ngmp=6.1.2=hb37e062_1\ngnutls=3.6.5=h91ad68e_1002\ngoogle-auth=1.24.0=pypi_0\ngoogle-auth-oauthlib=0.4.2=pypi_0\ngoogle-pasta=0.2.0=pyh8c360ce_0\ngraphite2=1.3.14=h38d11af_0\ngrpcio=1.32.0=pypi_0\ngym=0.17.3=dev_0\nh5py=2.10.0=nompi_py36h106b333_102\nharfbuzz=2.4.0=h831d699_1\nhdf5=1.10.5=nompi_h0cbb7df_1103\nicu=58.2=h0a44026_3\nidna=2.10=pypi_0\nimageio=2.9.0=pypi_0\nimportlib-metadata=2.0.0=py_1\nimportlib_metadata=2.0.0=1\nintel-openmp=2019.4=233\nipykernel=5.3.4=py36h5ca1d4c_0\nipython=7.16.1=py36h5ca1d4c_0\nipython_genutils=0.2.0=py_1\njasper=1.900.1=h1f36771_4\njedi=0.17.0=py36_0\njinja2=2.11.2=py_0\njpeg=9d=hbcb3906_0\njsonschema=3.2.0=py_2\njupyter_client=6.1.7=py_0\njupyter_core=4.7.0=py36h79c6626_0\njupyterlab_pygments=0.1.2=py_0\nkeras=2.4.3=pypi_0\nkeras-applications=1.0.8=py_1\nkeras-preprocessing=1.1.2=pypi_0\nkiwisolver=1.2.0=py36h863e41a_0\nlame=3.100=h1de35cc_0\nlcms2=2.11=h11f7e16_1\nlibblas=3.8.0=14_mkl\nlibcblas=3.8.0=14_mkl\nlibcxx=10.0.0=1\nlibedit
=3.1.20191231=h1de35cc_1\nlibffi=3.3=hb1e8313_2\nlibgfortran=3.0.1=0\nlibiconv=1.16=h1de35cc_0\nliblapack=3.8.0=14_mkl\nliblapacke=3.8.0=14_mkl\nlibopencv=4.2.0=py36_5\nlibopus=1.3.1=h1de35cc_0\nlibpng=1.6.37=h7cec526_2\nlibprotobuf=3.12.3=hab81aa3_2\nlibsodium=1.0.17=h01d97ff_0\nlibtiff=4.1.0=hcb84e12_1\nlibuv=1.40.0=haf1e3a3_0\nlibvpx=1.7.0=h378b8a2_0\nlibwebp=1.0.2=hd3bf737_5\nlibxml2=2.9.10=h7cdb67c_3\nlz4-c=1.9.2=h4a8c4bd_1\nmarkdown=3.3.3=pyh9f0ad1d_0\nmarkupsafe=1.1.1=py36h1de35cc_0\nmatplotlib=3.3.2=py36h79c6626_1\nmatplotlib-base=3.3.2=py36h181983e_0\nmistune=0.8.4=py36h1de35cc_0\nmkl=2019.4=233\nmkl-service=2.3.0=py36h9ed2024_0\nmkl_fft=1.2.0=py36hc64f4ea_0\nmkl_random=1.1.1=py36h959d312_0\nmock=4.0.3=pypi_0\nnb_conda=2.2.1=py36_0\nnb_conda_kernels=2.3.1=py36h79c6626_0\nnbclient=0.5.1=py_0\nnbconvert=6.0.7=py36_0\nnbformat=5.0.8=py_0\nncurses=6.2=h0a44026_1\nnest-asyncio=1.4.3=pyhd3eb1b0_0\nnettle=3.4.1=h3018a27_0\nnetworkx=2.5=pypi_0\nninja=1.10.2=py36hf7b0b51_0\nnotebook=6.1.4=py36_0\nnumpy=1.19.4=pypi_0\nnumpy-base=1.19.2=py36hcfb5961_0\noauthlib=3.1.0=pypi_0\nolefile=0.46=pyh9f0ad1d_1\nopencv=4.2.0=py36_5\nopenh264=2.1.0=hd9629dc_0\nopenssl=1.1.1i=h35c211d_0\nopt-einsum=3.3.0=pypi_0\npackaging=20.8=pyhd3eb1b0_0\npandoc=2.11=h0dc7051_0\npandocfilters=1.4.3=py36hecd8cb5_1\nparso=0.8.1=pyhd3eb1b0_0\npcre=8.44=hb1e8313_0\npexpect=4.8.0=pyhd3eb1b0_3\npickleshare=0.7.5=pyhd3eb1b0_1003\npillow=7.2.0=pypi_0\npip=20.3.3=py36hecd8cb5_0\npixman=0.40.0=haf1e3a3_0\nprometheus_client=0.9.0=pyhd3eb1b0_0\nprompt-toolkit=3.0.8=py_0\nprotobuf=3.12.3=py36h0130604_0\nptyprocess=0.6.0=pyhd3eb1b0_2\npy-opencv=4.2.0=py36h95af2a2_5\npyasn1=0.4.8=pypi_0\npyasn1-modules=0.2.8=pypi_0\npycparser=2.20=py_2\npyglet=1.5.11=pypi_0\npygments=2.7.3=pyhd3eb1b0_0\npyopengl=3.1.1a1=py36_0\npyparsing=2.4.7=py_0\npyrsistent=0.17.3=py36haf1e3a3_0\npython=3.6.12=h26836e1_2\npython-dateutil=2.8.1=py_0\npython_abi=3.6=1_cp36m\npytorch=1.7.1=py3.6_0\npywavelets=1.1.1=pypi_0\npyyaml=5.3.1=pypi_0\
npyzmq=19.0.1=py36h820b253_0\nreadline=8.0=h1de35cc_0\nrequests=2.25.1=pypi_0\nrequests-oauthlib=1.3.0=pypi_0\nrsa=4.6=pypi_0\nscikit-image=0.17.2=pypi_0\nscipy=1.5.4=pypi_0\nsend2trash=1.5.0=pyhd3eb1b0_1\nsetuptools=51.0.0=py36hecd8cb5_2\nsix=1.15.0=py36hecd8cb5_0\nsqlite=3.33.0=hffcf06c_0\ntensorboard=1.15.0=pypi_0\ntensorboard-plugin-wit=1.7.0=pypi_0\ntensorflow=1.15.0rc3=pypi_0\ntensorflow-estimator=1.15.1=pypi_0\ntermcolor=1.1.0=py_2\nterminado=0.9.1=py36_0\ntestpath=0.4.4=py_0\ntifffile=2020.9.3=pypi_0\ntk=8.6.10=hb0a8c7a_0\ntornado=6.1=py36h6941dd6_0\ntraitlets=4.3.3=py36h9f0ad1d_1\ntyping_extensions=3.7.4.3=py_0\nurllib3=1.26.2=pypi_0\nwcwidth=0.2.5=py_0\nwebencodings=0.5.1=py36_1\nwerkzeug=1.0.1=pyh9f0ad1d_0\nwheel=0.36.2=pyhd3eb1b0_0\nwrapt=1.12.1=py36h7b4949f_2\nx264=1!157.20191217=h1de35cc_0\nxz=5.2.5=h1de35cc_0\nzeromq=4.3.2=h6de7cb9_2\nzipp=3.4.0=pyhd3eb1b0_0\nzlib=1.2.11=h1de35cc_3\nzstd=1.4.5=h0384e3a_1\n"
},
{
"alpha_fraction": 0.4937853217124939,
"alphanum_fraction": 0.5299435257911682,
"avg_line_length": 28.53333282470703,
"blob_id": "ef1498ceee214c9cdc7dfd59acbea44c78dc9f8a",
"content_id": "f426286c99cc7bce75c054ee4c1513c30affa25f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 885,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 30,
"path": "/Umbrella_Academy_INFO7390_Project/INFO7390_Notebooks/modules/SL_model.py",
"repo_name": "ManaliSharma/Self_Driving_Cars_Supervised_And_Reinforcement_Learning",
"src_encoding": "UTF-8",
"text": "from torch import nn\n\nclass CustomModel(nn.Module):\n \"\"\"\n from alexnet\n \"\"\"\n def __init__(self):\n super(CustomModel, self).__init__()\n num_classes = 3\n self.features = nn.Sequential(\n nn.Conv2d(3, 32, kernel_size=7, stride=4, padding=2),\n nn.ReLU(inplace=True),\n nn.MaxPool2d(kernel_size=3, stride=2),\n nn.Conv2d(32, 64, kernel_size=5, padding=2),\n nn.ReLU(inplace=True),\n nn.MaxPool2d(kernel_size=3, stride=2),\n )\n\n self.classifier = nn.Sequential(\n nn.Dropout(),\n nn.Linear(576, 100),\n nn.ReLU(inplace=True),\n nn.Dropout(),\n nn.Linear(100, num_classes),\n )\n\n def forward(self, input):\n input = self.features(input)\n input = input.view(input.size(0), -1)\n return self.classifier(input)"
}
] | 8 |
phon3/Poloniex-Lending-Bot | https://github.com/phon3/Poloniex-Lending-Bot | 684faa6ade54f67020a3fd0bf99deaf15942e5b0 | db439688cc1fc6c7b97bc1af1c052f5b238ea84c | f86052ceb546edcc2d878c3baef283f23f31c57b | refs/heads/master | 2021-01-10T06:18:21.481425 | 2017-09-05T15:32:32 | 2017-09-05T15:32:32 | 51,522,470 | 7 | 1 | null | null | null | null | null | [
{
"alpha_fraction": 0.679852306842804,
"alphanum_fraction": 0.7021360993385315,
"avg_line_length": 33.78899002075195,
"blob_id": "f8140a9d21e6eb4db9d534b76160d3596d713954",
"content_id": "f5409a00d1944455c6d0bf175e4c3a233878c807",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7584,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 218,
"path": "/poloniexlendingbot-master/lendingbot.py",
"repo_name": "phon3/Poloniex-Lending-Bot",
"src_encoding": "UTF-8",
"text": "import io, sys, time, datetime, urllib2, json\nfrom poloniex import Poloniex\nfrom ConfigParser import SafeConfigParser\nfrom Logger import Logger\nfrom decimal import *\n\nSATOSHI = Decimal(10) ** -8\n\nconfig = SafeConfigParser()\nconfig_location = 'default.cfg'\n\ndefaultconfig =\\\n\"\"\"\n[API]\napikey = YourAPIKey\nsecret = YourSecret\n\n[BOT]\n#sleep between iterations, time in seconds\nsleeptime = 60\n#minimum daily lend rate in percent\nmindailyrate = 0.01\n#max rate. 2% is good choice because it's default at margin trader interface. 5% is max to be accepted by the exchange\nmaxdailyrate = 2\n#The number of offers to split the available balance uniformly across the [gaptop, gapbottom] range.\nspreadlend = 3\n#The depth of lendbook (in percent of lendable balance) to move through before placing the first (gapbottom) and last (gaptop) offer.\n#if gapbottom is set to 0, the first offer will be at the lowest possible rate. However some low value is recommended (say 10%) to skip dust offers\ngapbottom = 1\ngaptop = 100\n#Daily lend rate threshold after which we offer lends for 60 days as opposed to 2. If set to 0 all offers will be placed for a 2 day period\nsixtydaythreshold = 0.2\n#custom config per coin, useful when closing positions etc.\n#syntax: [COIN:mindailyrate:maxactiveamount, ... COIN:mindailyrate:maxactiveamount]\n#if maxactive amount is 0 - stop lending this coin. 
in the future you'll be able to limit amount to be lent.\n#coinconfig = [\"BTC:0.18:1\",\"CLAM:0.6:1\"]\n\"\"\"\n\nloadedFiles = config.read([config_location])\n#Create default config file if not found\nif len(loadedFiles) != 1:\n\tconfig.readfp(io.BytesIO(defaultconfig))\n\twith open(config_location, \"w\") as configfile:\n\t\tconfigfile.write(defaultconfig)\n\t\tprint 'Edit default.cnf file with your api key and secret values'\n\t\texit(0)\n\n\nsleepTime = float(config.get(\"BOT\",\"sleeptime\"))\nminDailyRate = Decimal(config.get(\"BOT\",\"mindailyrate\"))/100\nmaxDailyRate = Decimal(config.get(\"BOT\",\"maxdailyrate\"))/100\nspreadLend = int(config.get(\"BOT\",\"spreadlend\"))\ngapBottom = Decimal(config.get(\"BOT\",\"gapbottom\"))\ngapTop = Decimal(config.get(\"BOT\",\"gaptop\"))\nsixtyDayThreshold = float(config.get(\"BOT\",\"sixtydaythreshold\"))/100\n\ntry:\n\tcoincfg = {} #parsed\n\tcoinconfig = (json.loads(config.get(\"BOT\",\"coinconfig\")))\n\t#coinconfig parser\n\tfor cur in coinconfig:\n\t\tcur = cur.split(':')\n\t\tcoincfg[cur[0]] = dict(minrate=(Decimal(cur[1]))/100, maxactive=Decimal(cur[2]))\nexcept Exception as e:\n\tpass\n\t\n#sanity checks\nif sleepTime < 1 or sleepTime > 3600:\n\tprint \"sleeptime value must be 1-3600\"\n\texit(1)\nif minDailyRate < 0.00003 or minDailyRate > 0.05: # 0.003% daily is 1% yearly\n\tprint \"mindaily rate is set too low or too high, must be 0.003-5%\"\n\texit(1)\nif maxDailyRate < 0.00003 or maxDailyRate > 0.05:\n\tprint \"maxdaily rate is set too low or too high, must be 0.003-5%\"\n\texit(1)\nif spreadLend < 1 or spreadLend > 20:\n\tprint \"spreadlend value must be 1-20 range\"\n\texit(1)\n\ndryRun = False\ntry:\n\tif sys.argv.index('--dryrun') > 0:\n\t\tdryRun = True\nexcept ValueError:\n\tpass\n\ndef timestamp():\n\tts = time.time()\n\treturn datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n\nbot = Poloniex(config.get(\"API\",\"apikey\"), config.get(\"API\",\"secret\"))\nlog = 
Logger()\n\n#total lended global variable\ntotalLended = {}\n\ndef refreshTotalLended():\n global totalLended\n\tcryptoLended = bot.returnActiveLoans()\n\n\ttotalLended = {}\n\tcryptoLendedSum = Decimal(0)\n\n\tfor item in cryptoLended[\"provided\"]:\n\t\titemStr = item[\"amount\"].encode(\"utf-8\")\n\t\titemFloat = Decimal(itemStr)\n\t\tif item[\"currency\"] in totalLended:\n\t\t\tcryptoLendedSum = totalLended[item[\"currency\"]] + itemFloat\n\t\t\ttotalLended[item[\"currency\"]] = cryptoLendedSum\n\t\telse:\n\t\t\tcryptoLendedSum = itemFloat\n\t\t\ttotalLended[item[\"currency\"]] = cryptoLendedSum\n\ndef stringifyTotalLended():\n\tresult = 'Lended: '\n\tfor key in sorted(totalLended):\n\t\tresult += '[' + \"%.3f\" % Decimal(totalLended[key]) + ' '\n\t\tresult += key + '] '\n\treturn result\n\ndef createLoanOffer(cur,amt,rate):\n\tdays = '2'\n\t#if (minDailyRate - 0.000001) < rate and Decimal(amt) > 0.001:\n\tif float(amt) > 0.001:\n\t\trate = float(rate) - 0.000001 #lend offer just bellow the competing one\n\t\tamt = \"%.8f\" % Decimal(amt)\n\t\tif rate > sixtyDayThreshold:\n\t\t\tdays = '60'\n\t\tif sixtyDayThreshold == 0:\n\t\t\tdays = '2'\n\t\tif dryRun == False:\n\t\t\tmsg = bot.createLoanOffer(cur,amt,days,0,rate)\n\t\t\tlog.offer(amt, cur, rate, days, msg)\n\ndef cancelAndLoanAll():\n\tloanOffers = bot.returnOpenLoanOffers('BTC') #some bug with api wrapper? 
no idea why I have to provide a currency, and then receive every other\n\tif type(loanOffers) is list: #silly api wrapper, empty dict returns a list, which brakes the code later.\n\t\tloanOffers = {}\n\tif loanOffers.get('error'):\n\t\tprint loanOffers.get('error')\n\t\tprint 'You might want to edit config file (default.cnf) and put correct apisecret and key values'\n\t\texit(1)\n\n\tonOrderBalances = {}\n\tfor cur in loanOffers:\n\t\tfor offer in loanOffers[cur]:\n\t\t\tonOrderBalances[cur] = onOrderBalances.get(cur, 0) + Decimal(offer['amount'])\n\t\t\tif dryRun == False:\n\t\t\t\tmsg = bot.cancelLoanOffer(cur,offer['id'])\n\t\t\t\tlog.cancelOrders(cur, msg)\n\n\tlendingBalances = bot.returnAvailableAccountBalances(\"lending\")['lending']\n\tif dryRun == True: #just fake some numbers, if dryrun (testing)\n\t\tif type(lendingBalances) is list: #silly api wrapper, empty dict returns a list, which brakes the code later.\n\t\t\tlendingBalances = {}\n\t\tlendingBalances.update(onOrderBalances)\n\n\tfor activeCur in lendingBalances:\n\n\t\tactiveBal = lendingBalances[activeCur]\n\n\t\t#min daily rate can be changed per currency\n\t\tcurMinDailyRate = minDailyRate\n\t\tif activeCur in coincfg:\n\t\t\tif coincfg[activeCur]['maxactive'] == 0:\n\t\t\t\tlog.log('maxactive amount for ' + activeCur + ' set to 0, won\\'t lend.')\n\t\t\t\tcontinue\n\t\t\tcurMinDailyRate = coincfg[activeCur]['minrate']\n\t\t\tlog.log('Using custom mindailyrate ' + str(coincfg[activeCur]['minrate']*100) + '% for ' + activeCur)\n\n\t\tloans = bot.returnLoanOrders(activeCur)\n\t\ts = Decimal(0) #sum\n\t\ti = int(0) #offer book iterator\n\t\tj = int(0) #spread step count\n\t\tlent = Decimal(0)\n\t\tstep = (gapTop - gapBottom)/spreadLend\n\t\t#TODO check for minimum lendable amount, and try to decrease the spread. e.g. 
at the moment balances lower than 0.001 won't be lent\n\t\t#in case of empty lendbook, lend at max\n activePlusLended = Decimal(activeBal)\n if activeCur in totalLended:\n activePlusLended += Decimal(totalLended[activeCur])\n\t\tif len(loans['offers']) == 0:\n\t\t\tcreateLoanOffer(activeCur,Decimal(activeBal)-lent,maxDailyRate)\n\t\tfor offer in loans['offers']:\n\t\t\ts = s + Decimal(offer['amount'])\n\t\t\ts2 = s\n\t\t\twhile True:\n\t\t\t\tif s2 > activePlusLended*(gapBottom/100+(step/100*j)) and Decimal(offer['rate']) > curMinDailyRate:\n\t\t\t\t\tj += 1\n\t\t\t\t\ts2 = s2 + Decimal(activeBal)/spreadLend\n\t\t\t\telse:\n\t\t\t\t\tcreateLoanOffer(activeCur,s2-s,offer['rate'])\n\t\t\t\t\tlent = lent + (s2-s).quantize(SATOSHI)\n\t\t\t\t\tbreak\n\t\t\t\tif j == spreadLend:\n\t\t\t\t\tcreateLoanOffer(activeCur,Decimal(activeBal)-lent,offer['rate'])\n\t break\n\t\t\tif j == spreadLend:\n\t\t\t\tbreak\n\t\t\ti += 1\n\t\t\tif i == len(loans['offers']): #end of the offers lend at max\n\t\t\t\tcreateLoanOffer(activeCur,Decimal(activeBal)-lent,maxDailyRate)\n\nif __name__ == '__main__':\n\tlog.log('Welcome to Poloniex Lending Bot')\n\twhile True:\n\t\ttry:\n \t\trefreshTotalLended()\n\t\t\tlog.refreshStatus(stringifyTotalLended())\n\t\t\tcancelAndLoanAll()\n\t except Exception as e:\n \t log.log(\"ERROR: \" + str(e))\n\t\t\tpass\n\t\texcept KeyboardInterrupt:\n\t\t\tprint '\\nbye'\n\t\t\texit(0)\n time.sleep(sleepTime)\n"
},
{
"alpha_fraction": 0.7550082206726074,
"alphanum_fraction": 0.7750410437583923,
"avg_line_length": 44.43283462524414,
"blob_id": "2d3cf9bef95d0da417d8e71c3ed4a4b45a29b2b6",
"content_id": "9611c3dc2d965e01c5543d9719d036e4f4cc4538",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3045,
"license_type": "no_license",
"max_line_length": 273,
"num_lines": 67,
"path": "/poloniexlendingbot-master/README.md",
"repo_name": "phon3/Poloniex-Lending-Bot",
"src_encoding": "UTF-8",
"text": "#Poloniex lending bot\n\nPoloniex lending bot is written in Python for automatic lending on Poloniex exchange.\nIt will lend automatically all cryptocurrencies found in your lending account.\n\nIt uses an advanced lending strategy which will spread offers across the lend book to take advantage of possible spikes in lending rates. Inspired by [MarginBot](https://github.com/HFenter/MarginBot) and [BitfinexLendingBot](https://github.com/eAndrius/BitfinexLendingBot).\n\n##Install\n###Linux\n```\ngit clone https://github.com/Mikadily/poloniexlendingbot\ncd poloniexlendingbot/\npython lendingbot.py\n```\nWhen you first run the script a default.cnf will be generated. Edit it with your apikey and secret values.\n\n###Windows\n1. Install poloniexlendingbot - go to https://github.com/Mikadily/poloniexlendingbot and click the \"Download Zip\" button on the right. Unzip it into any location you choose.\n2. Install Python from https://www.python.org/ftp/python/2.7.10/python-2.7.10.msi . Run the executable. Choose to install the feature Add python.exe to Path on local hard drive during installation; Python should then be installed in C:\\Python27\n3. Check that Python runs. Open a new command prompt as administrator by typing cmd.exe into the Start menu and pressing Ctrl+Shift+Enter. Type python and you should see something like: `Python 2.7.10 (default....`\n4. Go to location where you unzipped the bot and double click (run) lendingbot.py. It will run briefly and generate default.cfg. Open it with your favorite editor, replace YourAPIKey and YourSecret with one's you generated on Poloniex.\n\n5. Double click (run) lendingbot.py again. Off you go!\n\n##Configuration\n\n```\n[API]\napikey = YourAPIKey\nsecret = YourSecret\n\n[BOT]\n#sleep between iterations, time in seconds\nsleeptime = 60\n\n#minimum daily lend rate in percent\nmindailyrate = 0.04\n\n#max rate. 
2% is good choice because it's default at margin trader interface.\n#5% is max to be accepted by the exchange\nmaxdailyrate = 2\n\n#The number of offers to split the available balance across the [gaptop, gapbottom] range.\nspreadlend = 3\n\n#The depth of lendbook (in percent of lendable balance) to move through\n#before placing the first (gapbottom) and last (gaptop) offer.\n#If gapbottom is set to 0, the first offer will be at the lowest possible rate.\n#However some low value is recommended (say 10%) to skip dust offers.\ngapbottom = 10\ngaptop = 200\n\n#Daily lend rate threshold after which we offer lends for 60 days as opposed to 2.\n#If set to 0 all offers will be placed for a 2 day period\nsixtydaythreshold = 0.2\n\n#custom config per coin, useful when closing positions etc.\n#syntax: [\"COIN:mindailyrate:maxactiveamount\",...]\n#if maxactive amount is 0 - stop lending this coin. in the future you'll be able to limit amount to be lent.\n#coinconfig = [\"BTC:0.18:1\",\"CLAM:0.6:1\"]\n```\n\nIf `spreadlend = 1` and `gapbottom = 0`, it will behave as simple lending bot lending at lowest possible offer.\n\n##Donations\n\nIf you find it useful, please consider donating some bitcoins: 1MikadW4iKTJ54GVrj7xS1SrZAhLUyZk38\n\n"
},
{
"alpha_fraction": 0.5803345441818237,
"alphanum_fraction": 0.5838823914527893,
"avg_line_length": 26.40277862548828,
"blob_id": "28d8a88f5a0ab5cc9f53c551526d518543b973e2",
"content_id": "f47769893362a86d07b4ad178cbde5fe581d9786",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1973,
"license_type": "no_license",
"max_line_length": 155,
"num_lines": 72,
"path": "/poloniexlendingbot-master/Logger.py",
"repo_name": "phon3/Poloniex-Lending-Bot",
"src_encoding": "UTF-8",
"text": "import sys\nimport time\nimport datetime\nimport atexit\n\nclass ConsoleOutput(object):\n def __init__(self):\n self._status = ''\n\tatexit.register(self._exit)\n\n def _exit(self):\n self._status += ' ' # In case the shell added a ^C\n self.status('')\n\n def status(self, status):\n update = '\\r'\n update += status\n update += ' ' * (len(self._status) - len(status))\n update += '\\b' * (len(self._status) - len(status))\n sys.stderr.write(update)\n self._status = status\n\n def printline(self, line):\n update = '\\r'\n update += line + ' ' * (len(self._status) - len(line)) + '\\n'\n update += self._status\n sys.stderr.write(update)\n\nclass Logger(object):\n def __init__(self):\n self.console = ConsoleOutput()\n self._lended = ''\n\tself.refreshStatus()\n\n def timestamp(self):\n ts = time.time()\n return datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n\n def log(self, msg):\n self.console.printline(self.timestamp() + ' ' + msg)\n self.refreshStatus()\n\n def offer(self, amt, cur, rate, days, msg):\n\tline = self.timestamp() + ' Placing ' + str(amt) + ' ' + str(cur) + ' at ' + str(float(rate)*100) + '% for ' + days + ' days... ' + self.digestApiMsg(msg)\n\tself.console.printline(line)\n\tself.refreshStatus()\n\n def cancelOrders(self, cur, msg):\n\tline = self.timestamp() + ' Canceling all ' + str(cur) + ' orders... ' + self.digestApiMsg(msg)\n\tself.console.printline(line)\n\tself.refreshStatus()\n\n def refreshStatus(self, lended=''):\n\tnow = time.time()\n\tif lended != '':\n\t\tif len(lended) > 99:\n\t\t\t#truncate status, try preventing console bloating\n\t\t\tself._lended = str(lended)[:96] + '...' \n\t\telse:\n\t\t\tself._lended = str(lended)\n\tself.console.status(self._lended)\n\n def digestApiMsg(self, msg):\n\ttry:\n\t m = (msg['message'])\n\texcept KeyError:\n\t pass\n\ttry:\n\t m = (msg['error'])\n\texcept KeyError:\n\t pass\n\treturn m\n"
}
] | 3 |
pshegde96/multilayer-perceptron | https://github.com/pshegde96/multilayer-perceptron | e36114739e2ca4dc844bf2318e43e59af790b62d | 2f57a6fc467493bd5648ba65be38d0149e5acef9 | 686170ad11f297620029328d6289ebd0267fadec | refs/heads/master | 2021-01-20T21:31:58.735837 | 2017-09-01T10:45:49 | 2017-09-01T10:45:49 | 101,766,285 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7341040372848511,
"alphanum_fraction": 0.7437379360198975,
"avg_line_length": 23.66666603088379,
"blob_id": "316dbb6f73aaa19505dd481148e6b6b075526ca7",
"content_id": "73477a99fd79ecd7c27e9d95aae500288f1ea393",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 519,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 21,
"path": "/README.md",
"repo_name": "pshegde96/multilayer-perceptron",
"src_encoding": "UTF-8",
"text": "Programming Assignment 1 of Deep Learning for Image Processing(EE6132) on Multi-Layer Perceptrons\n\n####Requirements:\n\n* Numpy\n* Python-Mnist: Install by:\n`pip install python-mnist`\n\n###Usage:\n\nFirst download the data from the [MNIST Database](http://yann.lecun.com/exdb/mnist/) and place it in the `./data/` directory.\n\nCreate a `./models/` directory to store the trained models. \n\nTo run the program with default parameters do:\n\n`python trainer.py <model-name>`\n\nFor advanced usage check:\n\n`python trainer.py --help`\n\n"
},
{
"alpha_fraction": 0.6588954925537109,
"alphanum_fraction": 0.6699735522270203,
"avg_line_length": 31.164894104003906,
"blob_id": "476df0864694c0409f1fb946893490aab6d9d593",
"content_id": "19e8f8e418d6ed11742a6aee1f8ae746cffd07e1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6048,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 188,
"path": "/trainer.py",
"repo_name": "pshegde96/multilayer-perceptron",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pickle,shutil\nfrom network import Network\nfrom cross_entropy import cross_entropy\nimport matplotlib.pyplot as plt\nimport math\nimport time \nimport argparse \nfrom mnist import MNIST\nimport os\n\n'''Parse CommandLine Arguments ''' \nparser = argparse.ArgumentParser()\nparser.add_argument('model_id',help='Enter the model number') \nparser.add_argument('-activation',help='Activation in the Hidden Layers') \nparser.add_argument('-layers',help='Hidden Layers, pass as string with numbers separated by commas')\nparser.add_argument('-no_iter',help='Number of mini-batch iterations to train',type=int)\nparser.add_argument('-batch_size',help='Batch size',type=int)\nparser.add_argument('-initial_lr',help='Initial Learning Rate',type=float)\nparser.add_argument('-lr_decay',help='Learning Rate Decay every 200 epochs',type=float)\nparser.add_argument('-lambda_reg',help='L2 norm regularization parameter',type=float)\nparser.add_argument('-momentum',help='Momentum Weight',type=float)\nparser.add_argument('-savemodel',help='1 to save,default 0',type=int) #not yet implemented\nparser.add_argument('-modeldir',help='Specify dir to store models with / suffixed.Default:models/') #not yet implemented\nargs = parser.parse_args()\n\n'''Important Parameters'''\nMODEL = './models/'+str(args.model_id)\nBATCH_SIZE = 64\nif args.batch_size:\n BATCH_SIZE = int(args.batch_size)\nLAYERS_SIZE = [784,1000,500,250,10]\nif args.layers:\n LAYERS_SIZE = map(int,args.layers.split(','))\nLEARNING_RATE = 0.3\nif args.initial_lr:\n LEARNING_RATE = float(args.initial_lr)\nLR_DECAY = 1.0 #EVERY 200 ITERATIONS\nif args.lr_decay:\n LR_DECAY = float(args.lr_decay)\nLAMBDA_REG = 0.0\nif args.lambda_reg:\n LAMBDA_REG = float(args.lambda_reg)\nNO_ITER = 8000\nif args.no_iter:\n NO_ITER = int(args.no_iter)\nACTIVATION = 'sigmoid'\nif args.activation:\n ACTIVATION = str(args.activation)\nMOMENTUM = 0.0\nif args.momentum:\n MOMENTUM = float(args.momentum)\n\n'''Print the 
parameters so that user can verify them '''\nprint 'Architecture: {}'.format(LAYERS_SIZE)\nprint 'Batch Size: {}'.format(BATCH_SIZE)\nprint 'Initial Learning Rate: {}'.format(LEARNING_RATE)\nprint 'Learning Rate Decay every 200 iterations: {}'.format(LR_DECAY)\nprint 'Momentum Weight: {}'.format(MOMENTUM)\nprint 'Lambda of L2 Weight Regularization: {}'.format(LAMBDA_REG)\nprint 'Total Number of Iterations: {}'.format(NO_ITER)\nprint 'Activation in Hidden Layers: {}'.format(ACTIVATION)\n\nif os.path.exists(MODEL):\n print '\\n\\n WARNING!!!: The model id that you are trying to train already exists.'\n print 'If you continue the program the existing model will be deleted \\n\\n\\n'\n\nprint '\\n Press Enter to Continue'\nraw_input()\n\n\n\n'''Load the Data-Set'''\n\ndata = MNIST('./data/')\nX_train,Y_train = data.load_training()\nX_test,Y_test = data.load_testing()\n\nX_train = np.array(X_train)\nY_train = np.array(Y_train)\n\nX_test = np.array(X_test)\nY_test = np.array(Y_test)\n\n\n#Normalize the data\nX_mean = np.mean(X_train,axis=0)\nX_train = X_train-X_mean\nX_std = np.sqrt(np.mean(X_train**2,axis=0))\nX_train = X_train/(X_std+1e-10)\nX_test = (X_test-X_mean)/(X_std+1e-7)\n\n'''Let the training begin '''\nindex = 0 #start from the first element\nnet = Network(LAYERS_SIZE,activation=ACTIVATION)\nnet.init_network()\n\nloss_train = []\nsteps_train = []\nloss_test = []\nsteps_test = []\naccuracy_test = []\n\n#Use try block to stop the training when Ctrl-C is pressed\ntry:\n for step in range(NO_ITER):\n if index+BATCH_SIZE >= X_train.shape[0]:\n index = 0\n #permute the data to instill a sense of random sampling\n permute = np.random.permutation(X_train.shape[0])\n X_train = X_train[permute]\n Y_train = Y_train[permute]\n\n X_batch = X_train[index:index+BATCH_SIZE]\n Y_batch = Y_train[index:index+BATCH_SIZE]\n\n Y_hat = net.forward_pass(X_batch)\n\n #Record the training loss\n loss = cross_entropy(Y_hat,Y_batch,one_hot='False')\n loss_train.append(loss)\n 
steps_train.append(step)\n\n #Update parameters\n net.backward_pass(Y_batch,LAMBDA_REG,LEARNING_RATE= LEARNING_RATE,MOMENTUM=MOMENTUM)\n for layer in net.layers:\n layer.W += layer.dW_v\n layer.b += layer.db_v\n\n if step%200 == 0:\n #compute test loss\n LEARNING_RATE *= LR_DECAY\n Y_hat_test = net.forward_pass(X_test)\n loss_test1 = cross_entropy(Y_hat_test,Y_test,one_hot='False')\n\n #Also compute the test accuracy\n p_test = net.forward_pass(X_test)\n Y_test_hat = np.zeros_like(p_test)\n Y_test_onehot = np.zeros_like(p_test)\n for i in range(len(Y_test)):\n Y_test_hat[i,np.argmax(p_test[i])]=1\n Y_test_onehot[i,Y_test[i]] =1\n test_accuracy = np.sum(Y_test_hat*Y_test_onehot)/Y_test.shape[0]\n\n #Record data\n steps_test.append(step)\n loss_test.append(loss_test1)\n accuracy_test.append(test_accuracy)\n\n print 'STEP: {} \\t BATCH LOSS: {} \\t TEST LOSS: {} \\t TEST ACCURACY: {}'.format(step,loss,loss_test1,test_accuracy)\n\n index += BATCH_SIZE\n\n#If Ctrl-C is pressed, exit the training\nexcept KeyboardInterrupt:\n print '\\n'\n\n\np_test = net.forward_pass(X_test)\nY_test_hat = np.zeros_like(p_test)\nY_test_onehot = np.zeros_like(p_test)\nfor i in range(len(Y_test)):\n Y_test_hat[i,np.argmax(p_test[i])]=1\n Y_test_onehot[i,Y_test[i]] =1\n\nprint np.sum(Y_test_hat*Y_test_onehot)/Y_test.shape[0]\n\n'''Save the model'''\n\nfor layer in net.layers:\n del layer.dW\n del layer.dW_v\n del layer.db\n del layer.db_v\n del layer.X\n del layer.Z\n del layer.A\n\n\nif os.path.exists(MODEL):\n shutil.rmtree(MODEL)\nos.makedirs(MODEL)\nwith open(MODEL+'/weights.pkl','wb') as output:\n pickle.dump(net,output,pickle.HIGHEST_PROTOCOL)\n\n#Also save the important data\nwith open(MODEL+'/data.pkl','wb') as output:\n pickle.dump([steps_train,loss_train,steps_test,loss_test,accuracy_test],output,pickle.HIGHEST_PROTOCOL)\n\n"
},
{
"alpha_fraction": 0.5807560086250305,
"alphanum_fraction": 0.6048110127449036,
"avg_line_length": 21.384614944458008,
"blob_id": "905ff57dfebf28e4184392aebf0e82a7f89476ea",
"content_id": "2f7ceccc6b6dd84ed0d581434dc50124a6c2b602",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 291,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 13,
"path": "/softmax.py",
"repo_name": "pshegde96/multilayer-perceptron",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\n'''\nComputes the softmax of a matrix considering the rows as input variables\n'''\ndef softmax(x,tmp=1):\n big = np.max(x,axis=1)\n x = x-big.reshape(-1,1)\n exp = np.exp(x*tmp)\n return exp/(np.sum(exp,axis=1)).reshape(-1,1)\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.6209677457809448,
"alphanum_fraction": 0.6370967626571655,
"avg_line_length": 18.105262756347656,
"blob_id": "2c9d223e419398e7a691e7f501d2b4614d312590",
"content_id": "ff9e97cf1393b20b0e815703cfffc15ac3c90d25",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 372,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 19,
"path": "/activations.py",
"repo_name": "pshegde96/multilayer-perceptron",
"src_encoding": "UTF-8",
"text": "'''\nContains all the activation functions implemented along with their derivatives\n'''\nimport numpy as np\n\ndef sigmoid_fn(X):\n return 1/(1+np.exp(-X))\n\ndef sigmoid_derivative(X):\n sigm = sigmoid_fn(X)\n return sigm*(1-sigm)\n\ndef relu_fn(X):\n return np.clip(X,0,None)\n\ndef relu_derivative(X):\n der = np.zeros_like(X)\n der[X>=0] = 1\n return der\n \n"
},
{
"alpha_fraction": 0.5125913619995117,
"alphanum_fraction": 0.5194963216781616,
"avg_line_length": 38.07936477661133,
"blob_id": "7000652e41214a5ed80d78b001744f92e0b04ec1",
"content_id": "939584bd7b47f2a41be079de8c1accf7776d2d9e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2462,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 63,
"path": "/network.py",
"repo_name": "pshegde96/multilayer-perceptron",
"src_encoding": "UTF-8",
"text": "import numpy as np\nfrom layers import Layer\nfrom softmax import softmax\n\nclass Network:\n\n def __init__(self,layers_size,activation='relu',task='classification'):\n self.layers_size = layers_size\n self.activation = activation\n self.task = task\n\n def init_network(self):\n \n self.layers = []\n #initialize all layers except the last one witht the specified activation\n for l in range(len(self.layers_size)-2):\n self.layers.append(Layer(activation='relu',\n in_dim=self.layers_size[l],\n out_dim=self.layers_size[l+1]))\n self.layers[l].init_variables() #initialize the weights of the layer\n\n #Now add the final softmax layer\n self.layers.append(Layer(activation='linear',\n in_dim=self.layers_size[-2],\n out_dim=self.layers_size[-1],\n posn='final'))\n self.layers[-1].init_variables() #initialize the weights of the layer\n\n def forward_pass(self,X):\n X_new = np.copy(X)\n\n for layer in self.layers:\n X_old = np.copy(X_new)\n X_new = layer.forward(X_old)\n\n if self.task == 'classification':\n self.Y_hat = softmax(X_new)\n #Yet to implement for regression\n else:\n pass\n\n return self.Y_hat \n\n def backward_pass(self,Y_vec,LAMBDA_REG=0,LEARNING_RATE=0.1,MOMENTUM=0.3):\n \n #encode Y_vec in one-hot form\n Y = np.zeros_like(self.Y_hat)\n Y[range(self.Y_hat.shape[0]),Y_vec] = 1\n delta_plus = (self.Y_hat - Y)/self.Y_hat.shape[0] \n\n #process the final layer differently:\n delta_plus = self.layers[-1].backward(delta_plus=delta_plus,\n W_plus=None,LAMBDA_REG=LAMBDA_REG,\n LEARNING_RATE=LEARNING_RATE,\n MOMENTUM=MOMENTUM)\n\n #go backwards through the layers, omitting the last layer\n for i in range(len(self.layers)-1):\n delta_plus = self.layers[-2-i].backward(delta_plus=delta_plus,\n W_plus=np.copy(self.layers[-1-i].W),\n LAMBDA_REG=LAMBDA_REG,\n LEARNING_RATE=LEARNING_RATE,\n MOMENTUM=MOMENTUM)\n"
},
{
"alpha_fraction": 0.5619094967842102,
"alphanum_fraction": 0.5698657631874084,
"avg_line_length": 33.67241287231445,
"blob_id": "cb462feaa329118c16111c1a2cfd431049d72bea",
"content_id": "f06d9a39d6f721897f65be9bc9e4a63e24c3b7e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2011,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 58,
"path": "/layers.py",
"repo_name": "pshegde96/multilayer-perceptron",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport activations as act\n\nclass Layer:\n\n def __init__(self,activation='relu',in_dim=1,out_dim=1,posn='hidden'):\n self.activation = activation\n self.in_dim = in_dim\n self.out_dim = out_dim\n self.posn=posn\n\n #Initialize the weight matrix and the bias vector via Xavier Initialization\n def init_variables(self):\n self.W = 0.1*np.random.randn(self.in_dim,self.out_dim)/np.sqrt(self.in_dim)\n self.b = 0.1*np.ones((1,self.out_dim)) #initialize with a small +ve value so that relu neurons don't go to 0 at birth\n #momentum parameters; initialized with 0\n self.dW_v = np.zeros_like(self.W)\n self.db_v = np.zeros_like(self.b)\n\n '''\n The operation is A = f(Z)\n Z = XW\n '''\n def forward(self,X):\n self.X = X\n self.Z = X.dot(self.W)+self.b\n \n if self.activation == 'linear':\n self.A = self.Z\n elif self.activation == 'sigmoid':\n self.A = act.sigmoid_fn(self.Z)\n else :\n self.A = act.relu_fn(self.Z)\n\n return self.A\n\n def backward(self,delta_plus,W_plus,LAMBDA_REG=0,\n LEARNING_RATE=0.1,MOMENTUM=0.3):\n \n #process the final layer differently\n if self.posn == 'final':\n delta = np.copy(delta_plus)\n \n else:\n if self.activation == 'linear':\n f_derivative = np.ones_like(self.Z)\n elif self.activation == 'sigmoid':\n f_derivative = act.sigmoid_derivative(self.Z)\n else:\n f_derivative = act.relu_derivative(self.Z)\n delta = (delta_plus.dot(W_plus.T))*f_derivative\n\n self.dW = self.X.T.dot(delta) + LAMBDA_REG*self.W \n self.db = np.ones((1,self.X.shape[0])).dot(delta)\n self.dW_v = MOMENTUM*self.dW_v - LEARNING_RATE*self.dW\n self.db_v = MOMENTUM*self.db_v - LEARNING_RATE*self.db\n #return delta to calc grad for the previous layer\n return delta\n"
},
{
"alpha_fraction": 0.5816794037818909,
"alphanum_fraction": 0.6290076375007629,
"avg_line_length": 22.962963104248047,
"blob_id": "7787ded868925d5edbe55246de1d7fdc833a97e8",
"content_id": "4a9e4ab5081c01199f34e30e373e5fc4b3708189",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1310,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 54,
"path": "/numerical_gradient.py",
"repo_name": "pshegde96/multilayer-perceptron",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport cPickle,gzip\nfrom network import Network\nfrom cross_entropy import cross_entropy\nimport copy\n\n'''Important Parameters'''\nBATCH_SIZE = 64\nLAYERS_SIZE = [784,50,20,10]\nLEARNING_RATE = 0.001\nLR_DECAY = 0.85 #EVERY 200 ITERATIONS\nLAMBDA_REG = 0.005\nNO_ITER = 8000\nh = 1e-5\n\n\n\n'''Load the Data-Set'''\nf = gzip.open('mnist.pkl.gz','rb')\ntrain_set,val_set,test_set = cPickle.load(f)\nf.close()\n\nX_train =train_set[0]\nY_train = train_set[1]\nX_test = test_set[0]\nY_test = test_set[1]\n\n'''Let the training begin '''\nnet = Network(LAYERS_SIZE,activation='sigmoid')\nnet.init_network()\n\n\nX_batch = X_train[0:1000]\nY_batch = Y_train[0:1000]\n\nY_hat = net.forward_pass(X_batch)\n\n#Calculate Numerical Gradient\nnet.backward_pass(Y_batch)\ndiff = 0\ncount = 0\nfor k in range(len(net.layers)):\n for i in range(net.layers[k].W.shape[0]):\n for j in range(net.layers[k].W.shape[1]):\n net2 = copy.deepcopy(net)\n net2.layers[k].W[i,j] += h\n f1 = cross_entropy(net2.forward_pass(X_batch),Y_batch,one_hot='False')\n net2.layers[k].W[i,j] -= 2*h\n f2 = cross_entropy(net2.forward_pass(X_batch),Y_batch,one_hot='False')\n diff += (net.layers[k].dW[i,j] - (f1-f2)/2/h)**2\n count +=1\n print count\n\nprint diff/count\n\n \n\n\n"
},
{
"alpha_fraction": 0.5308219194412231,
"alphanum_fraction": 0.5547945499420166,
"avg_line_length": 21.461538314819336,
"blob_id": "621a76a55bffcdb02bc194ce68f4f83989538789",
"content_id": "4a13229b2111754f1c4e761a7c1790e5242c0744",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 292,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 13,
"path": "/cross_entropy.py",
"repo_name": "pshegde96/multilayer-perceptron",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\ndef cross_entropy(Y_hat,y,one_hot='True'):\n\n if one_hot == 'False':\n Y = np.zeros_like(Y_hat)\n Y[range(y.shape[0]),y] = 1\n else:\n Y=y\n\n inter = Y*np.log(Y_hat+1e-7)\n cross_entropy = -1.0/Y.shape[0]*(np.sum(inter))\n return cross_entropy\n"
}
] | 8 |
AK0055/vOCR-tran | https://github.com/AK0055/vOCR-tran | ea68e68fd4fbfcd0603e41e97766edaab0b1a778 | b933f7ed0e2c8d4948cb1acf21ade1bc6c85073b | 01b5531b6e59bde4ddcfc91f888762394a404de3 | refs/heads/main | 2023-05-31T13:29:13.361989 | 2021-06-23T02:23:54 | 2021-06-23T02:23:54 | 379,230,644 | 4 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5988065004348755,
"alphanum_fraction": 0.626807451248169,
"avg_line_length": 32.156864166259766,
"blob_id": "79fe301d3cf88d08fe961c78e9c889b9d2c9b611",
"content_id": "63194f7d449c22f9a172496f18b9fc1a428bfbc0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8714,
"license_type": "permissive",
"max_line_length": 141,
"num_lines": 255,
"path": "/OCRtran.py",
"repo_name": "AK0055/vOCR-tran",
"src_encoding": "UTF-8",
"text": "import tkinter\r\nfrom tkinter import messagebox\r\nfrom tkinter import *\r\nfrom tkinter import ttk\r\nfrom tkinter import filedialog\r\nimport cv2\r\nimport pytesseract\r\nimport os\r\nimport pyttsx3\r\nfrom translate import Translator\r\nfrom langdetect import detect\r\n\r\nengine = pyttsx3.init(\"sapi5\")\r\npytesseract.pytesseract.tesseract_cmd = 'C:\\\\Program Files (x86)\\\\Tesseract-OCR\\\\tesseract.exe'\r\nlangc=''\r\nlangin=''\r\nvid=''\r\ncode=''\r\ntran=''\r\ndef langcf(r):\r\n global langc\r\n langc=r\r\n print(langc)\r\ndef langinf(r):\r\n global langin\r\n langin=r\r\n print(langin)\r\ndef voice(t):\r\n global tran\r\n tran=t\r\n engine.say(tran)\r\n engine.runAndWait()\r\nl1='english'\r\nl2='german'\r\nl3='french'\r\nl4='spanish'\r\nl5='japanese'\r\nl6='chinese'\r\nl7='korean'\r\nl8='russian'\r\nl9='italian'\r\nl10='portuguese'\r\ntop=Tk()\r\nm=tkinter.Tk()\r\n\r\n\r\ndef button():\r\n top.button = ttk.Button(top.labelFrame, text = \"Browse Image\",command =lambda: fileDialog)\r\n top.button.grid(column = 1, row = 1)\r\ndef button1():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"English\",command=lambda: langcf(l1))\r\n top.button1.grid(column = 1, row = 2)\r\ndef button2():\r\n top.button = ttk.Button(top.labelFrame, text = \"German\",command=lambda: langcf(l2))\r\n top.button.grid(column = 1, row = 3)\r\ndef button3():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"French\",command = lambda: langcf(l3))\r\n top.button1.grid(column = 1, row = 4)\r\ndef button4():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Spanish\",command=lambda: langcf(l4))\r\n top.button1.grid(column = 1, row = 5)\r\ndef button5():\r\n top.button = ttk.Button(top.labelFrame, text = \"Japanese\",command=lambda: langcf(l5))\r\n top.button.grid(column = 1, row = 6)\r\ndef button6():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Chinese\",command=lambda: langcf(l6))\r\n top.button1.grid(column = 1, row = 7)\r\ndef button7():\r\n 
top.button1 = ttk.Button(top.labelFrame, text = \"Korean\",command=lambda: langcf(l7))\r\n top.button1.grid(column = 1, row = 8)\r\ndef button8():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Russian\",command=lambda: langcf(l8))\r\n top.button1.grid(column = 1, row = 9)\r\ndef button9():\r\n top.button = ttk.Button(top.labelFrame, text = \"Italian\",command=lambda: langcf(l9))\r\n top.button.grid(column = 1, row = 10)\r\ndef button10():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Portuguese\",command=lambda: langcf(l10))\r\n top.button1.grid(column = 1, row = 11)\r\n \r\ndef Button1():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"English\",command=lambda: langinf(l1))\r\n top.button1.grid(column = 1, row = 2)\r\ndef Button2():\r\n top.button = ttk.Button(top.labelFrame, text = \"German\",command=lambda: langinf(l2))\r\n top.button.grid(column = 1, row = 3)\r\ndef Button3():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"French\",command = lambda: langinf(l3))\r\n top.button1.grid(column = 1, row = 4)\r\ndef Button4():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Spanish\",command=lambda: langinf(l4))\r\n top.button1.grid(column = 1, row = 5)\r\ndef Button5():\r\n top.button = ttk.Button(top.labelFrame, text = \"Japanese\",command=lambda: langinf(l5))\r\n top.button.grid(column = 1, row = 6)\r\ndef Button6():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Chinese\",command=lambda: langinf(l6))\r\n top.button1.grid(column = 1, row = 7)\r\ndef Button7():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Korean\",command=lambda: langinf(l7))\r\n top.button1.grid(column = 1, row = 8)\r\ndef Button8():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Russian\",command=lambda: langinf(l8))\r\n top.button1.grid(column = 1, row = 9)\r\ndef Button9():\r\n top.button = ttk.Button(top.labelFrame, text = \"Italian\",command=lambda: langinf(l9))\r\n top.button.grid(column = 1, row = 10)\r\ndef Button10():\r\n 
top.button1 = ttk.Button(top.labelFrame, text = \"Portuguese\",command=lambda: langinf(l10))\r\n top.button1.grid(column = 1, row = 11)\r\ndef voicebutton():\r\n top.button1 = ttk.Button(top.labelFrame, text = \"Voice\",command=lambda: voice(tran))\r\n top.button1.grid(column = 1, row = 13)\r\n\r\ndef fileDialog():\r\n top.path = filedialog.askopenfilename(initialdir = \"/\", title = \"Select A File\", filetype =((\"jpg files\",\"*.jpg\"),(\"all files\",\"*.*\")) )\r\n top.label = ttk.Label(top.labelFrame)\r\n top.label.grid(column = 0, row = 0)\r\n top.label.configure(text = top.path)\r\n rev = top.path[::-1]\r\n lst=rev.split(\"/\")\r\n filename=lst[0][::-1]\r\n return filename\r\ndef voicepath():\r\n global vid\r\n global langin\r\n if langin=='english':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_EN-US_DAVID_11.0'\r\n elif langin=='german':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_DE-DE_HEDDA_11.0'\r\n elif langin=='french':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_FR-FR_HORTENSE_11.0'\r\n elif langin=='spanish':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_ES-MX_SABINA_11.0'\r\n elif langin=='japanese':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_JA-JP_HARUKA_11.0'\r\n elif langin=='chinese':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_ZH-CN_HUIHUI_11.0'\r\n elif langin=='korean':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_KO-KR_HEAMI_11.0'\r\n elif langin=='russian':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_RU-RU_IRINA_11.0'\r\n elif langin=='italian':\r\n vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_IT-IT_ELSA_11.0'\r\n elif langin=='portuguese':\r\n 
vid='HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_PT-BR_MARIA_11.0'\r\n\r\n\r\ntop.title(\"OCR Language Translator\")\r\ntop.minsize(640, 400)\r\ntop.wm_iconbitmap('iconn.ico')\r\n\r\n\r\n\r\ndef run():\r\n global tran\r\n voicepath()\r\n engine.setProperty('voice',vid)\r\n engine.setProperty('rate',110)\r\n engine.setProperty('volume',1)\r\n\r\n\r\n #custom= r'--oem 3 --psm 6'\r\n if langc=='english':\r\n code='eng'\r\n elif langc=='german':\r\n code='ger'\r\n elif langc=='french':\r\n code='fre'\r\n elif langc=='spanish':\r\n code='spa'\r\n elif langc=='japanese':\r\n code='jpn'\r\n elif langc=='chinese':\r\n code='zho'\r\n elif langc=='korean':\r\n code='kor'\r\n elif langc=='russian':\r\n code='rus'\r\n elif langc=='italian':\r\n code='ita'\r\n txt=pytesseract.image_to_string(img,code)\r\n key = txt\r\n msg1= tkinter.Message(m, text = key)\r\n msg1.config(bg='white',font=('times', 14, 'normal'))\r\n msg1.pack()\r\n\r\n\r\n langd=detect(txt)\r\n\r\n if langd=='en':\r\n langdet='english'\r\n elif langd=='de':\r\n langdet='german'\r\n elif langd=='fr':\r\n langdet='french'\r\n elif langd=='es':\r\n langdet='spanish'\r\n elif langd=='ja':\r\n langdet='japanese'\r\n elif langd=='zh':\r\n langdet='chinese'\r\n elif langd=='ko':\r\n langdet='korean'\r\n elif langd=='ru':\r\n langdet='russian'\r\n elif langd=='it':\r\n langdet='italian'\r\n elif langd=='pt':\r\n langdet='portuguese'\r\n translator= Translator(from_lang=langdet,to_lang=langin)\r\n tran= translator.translate(txt)\r\n key = tran\r\n message = tkinter.Message(m, text = key)\r\n message.config(bg='white', font=('times', 14, 'normal'))\r\n message.pack()\r\n m.mainloop()\r\ndef submit():\r\n top.button = ttk.Button(top.labelFrame, text = \"Submit\",command =run)\r\n top.button.grid(column = 1, row = 12)\r\n\r\ntop.labelFrame = ttk.LabelFrame(top, text = \"Select image to translate\")\r\ntop.labelFrame.grid(column = 0, row = 1, padx = 20, pady = 20)\r\nbutton()\r\nimg = 
cv2.imread(fileDialog()) \r\ntop.labelFrame = ttk.LabelFrame(top, text = \"Select input language\")\r\ntop.labelFrame.grid(column = 4, row = 1, padx = 20, pady = 20)\r\nbutton1()\r\nbutton2()\r\nbutton3()\r\nbutton4()\r\nbutton5()\r\nbutton6()\r\nbutton7()\r\nbutton8()\r\nbutton9()\r\nbutton10()\r\ntop.labelFrame = ttk.LabelFrame(top, text = \"Select output language\")\r\ntop.labelFrame.grid(column = 6, row = 1, padx = 20, pady = 20)\r\nButton1()\r\nButton2()\r\nButton3()\r\nButton4()\r\nButton5()\r\nButton6()\r\nButton7()\r\nButton8()\r\nButton9()\r\nButton10()\r\ntop.labelFrame = ttk.LabelFrame(top, text = \"Result\")\r\ntop.labelFrame.grid(column = 8, row = 1, padx = 20, pady = 20)\r\nsubmit()\r\nvoicebutton() \r\n \r\ntop.mainloop()\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.7189805507659912,
"alphanum_fraction": 0.8021461963653564,
"avg_line_length": 66.7727279663086,
"blob_id": "07b00bfa4a70f5a4850c2f4e2f9710ad97d798e6",
"content_id": "af8a4ccc1e84529a691a9dad2a3277cd3eacfeda",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1507,
"license_type": "permissive",
"max_line_length": 561,
"num_lines": 22,
"path": "/README.md",
"repo_name": "AK0055/vOCR-tran",
"src_encoding": "UTF-8",
"text": "# Language-Translator-from-Image-with-Voice-Output-using-OCR\nThis project converts the uploaded image into a form of text using an optical recognition tool (OCR) and then translate that text into the language user wishes and reading that output as a voice. This project is carried out using Python and some other python modules and Tesseract OCR tool as backend. Its main feature is ease of access for end user in terms of uploading query image and availing 99% accurate output. It currently supports the following 10 languages: English, German, French, Spanish, Japanese, Chinese, Korean, Russian, Italian and Portuguese.\n# Requirements\n Tesseract OCR tool\n Pytesseract for Python\n Python 3.7\n OpenCV\n Langdetect\n Pytranslate\n Tkinter\n Microsoft TTS voice package\n# How to use\nThe user first browses the image from their device and uploads it to this\napplication.\nThen user is prompted to click the language in the image and the language they\nwant it to be translated to one by one. \nFinally, the user clicks submit and an output window appears in a few seconds.\nAlso by clicking voice, a voice is generated to read the output language to the\nuser.\n\n\n\n"
}
] | 2 |
artigupta13/personal_portfolio_Project | https://github.com/artigupta13/personal_portfolio_Project | 0064804457dec67cb2706857d20864c15089e003 | 12a129672c9c4ab2e918f11aa57289002b42fc17 | 9facbe334bb78c2a7d4ff340c8491c648b8df49c | refs/heads/master | 2023-04-03T15:53:06.025087 | 2021-04-13T17:17:41 | 2021-04-13T17:17:41 | 340,884,654 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7263157963752747,
"alphanum_fraction": 0.74210524559021,
"avg_line_length": 26.14285659790039,
"blob_id": "da054c6c17e389e9183e7b5d5ac8dc0feb2a7efa",
"content_id": "8d42edba20835316a4ff943a7c43cc5a73ef3833",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 190,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 7,
"path": "/blog/models.py",
"repo_name": "artigupta13/personal_portfolio_Project",
"src_encoding": "UTF-8",
"text": "from django.db import models\n\n# Create your models here.\nclass Blog(models.Model):\n title= models.CharField(max_length=200)\n description=models.TextField()\n date=models.DateField()\n"
},
{
"alpha_fraction": 0.5804877877235413,
"alphanum_fraction": 0.5902438759803772,
"avg_line_length": 28.285715103149414,
"blob_id": "e8442f396bfa7f77ef4209532307dc91ae27c8df",
"content_id": "183305f4c24c263acaf1ced6b04e938952c6d72b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 205,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 7,
"path": "/blog/templates/blog/detail.html",
"repo_name": "artigupta13/personal_portfolio_Project",
"src_encoding": "UTF-8",
"text": "{% extends 'portfolio/base.html' %}\n{% block content %}\n<br><br>\n<h2 style=\"color:red;\">{{blog.title}}</h2>\n<p>{{blog.description|safe }} </p>\n<small> {{ blog.date| date:'F jS Y' }}</small>\n{% endblock %}\n"
},
{
"alpha_fraction": 0.737500011920929,
"alphanum_fraction": 0.7524999976158142,
"avg_line_length": 32.33333206176758,
"blob_id": "be0585c7aee9f517af1d8115d3c9ff3a3ca4a68e",
"content_id": "5a2c04d4ddb4db34cfc8c384c516dd5ae714e5f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 400,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 12,
"path": "/portfolio/views.py",
"repo_name": "artigupta13/personal_portfolio_Project",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render, get_object_or_404\nfrom .models import Project\n\n# Create your views here.\ndef home(request):\n projects=Project.objects.all()\n\n return render(request,'portfolio/home.html',{'projects':projects})\n\ndef portfolio(request,project_id):\n project=get_object_or_404(Project,pk=project_id)\n return render(request,'portfolio/portfolios.html',{'project':project})\n"
}
] | 3 |
JoseRobertoMejiaPacheco/partner_pokedex | https://github.com/JoseRobertoMejiaPacheco/partner_pokedex | 4c5ef37719f0c25145b88227d7cdae9921733c6f | c91475805411c5c842705c8831592ab036a76c8d | 67994d375f6219f97ae13179b27d27e1bd49ad66 | refs/heads/master | 2023-06-10T01:58:13.705893 | 2021-06-25T16:06:18 | 2021-06-25T17:21:01 | 380,289,700 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5287485718727112,
"alphanum_fraction": 0.5343855619430542,
"avg_line_length": 31.851852416992188,
"blob_id": "126b119610eacc0c7f0fe248d19e7afa4b093528",
"content_id": "c1a3f2711f48ba22afd55f3be3c5f2cd2f5ec40b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 887,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 27,
"path": "/models/res_partner.py",
"repo_name": "JoseRobertoMejiaPacheco/partner_pokedex",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nfrom odoo import models, fields, api\n\n\nclass ResPartner(models.Model):\n _inherit = 'res.partner'\n # pokedex_ids = fields.One2many('partner_pokedex.pokedex', 'partner_id')\n pokedex_ids = fields.Many2many('partner_pokedex.pokedex')\n # pokedex_ids = fields.Many2one('partner_pokedex.pokedex')\n\n @api.multi\n def evolve_pokemon(self):\n return {\n 'name': 'Evolve Pokemon',\n 'type': 'ir.actions.act_window',\n 'view_mode': 'form',\n 'res_model': 'partner_pokedex.evolve',\n # pass the id,\n 'context': {\n # for passing Many2One field context value in Wizard form view\n 'partner_id': self.id,\n 'pokedex_ids': self.pokedex_ids.ids, \n },\n # 'res_id': message_id.id,\n 'target': 'new'\n }\n"
},
{
"alpha_fraction": 0.6054664254188538,
"alphanum_fraction": 0.6072489619255066,
"avg_line_length": 33.60416793823242,
"blob_id": "d015739a71ed5c4119027aabec9d26bd1286cd9a",
"content_id": "8f67fee00b5d0223a9e5d99258b33f6b60c275f9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1683,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 48,
"path": "/wizard/evolve_pokemon.py",
"repo_name": "JoseRobertoMejiaPacheco/partner_pokedex",
"src_encoding": "UTF-8",
"text": "from odoo.exceptions import ValidationError\nfrom odoo import _, api, fields, models\n\n\nclass EnvolvePokemon(models.TransientModel):\n _name = 'partner_pokedex.evolve'\n _description = 'New Description'\n\n \n def _get_current_pokemon(self):\n selection=[]\n if 'partner_id' in self._context:\n pokemons=self.env['res.partner'].browse(self._context['partner_id']).pokedex_ids\n for pokemon in pokemons:\n selection.append((pokemon.id,pokemon.name))\n return selection\n\n def _selection_filter(self): \n if self.pokedex_ids_c:\n self.pokedex_ids_e=self.env['partner_pokedex.pokedex'].search([('parent_id','in',[self.pokedex_ids_c])]).id\n print(self)\n \n\n confirmation = fields.Boolean() \n pokedex_ids_c = fields.Selection(_get_current_pokemon)\n \n pokedex_ids_e = fields.Many2one('partner_pokedex.pokedex')\n\n\n\n @api.onchange('pokedex_ids_c')\n def _onchange_pokedex_ids_c(self):\n self._selection_filter()\n\n\n @api.multi\n def evolve_pokemon(self):\n if self.confirmation:\n if 'partner_id' in self._context and self.pokedex_ids_e and int(self.pokedex_ids_c):\n pokemons=self.env['res.partner'].browse(self._context['partner_id'])\n pokemons.pokedex_ids=[(3,int(self.pokedex_ids_c))]\n pokemons.pokedex_ids = [(4, self.pokedex_ids_e.id)] \n print(pokemons.pokedex_ids)\n else:\n raise ValidationError(\"You must select at least one pokemon\")\n\n else:\n raise ValidationError(\"You must confirm the evolution of your pokemon\")\n\n\n \n\n\n\n"
},
{
"alpha_fraction": 0.6521739363670349,
"alphanum_fraction": 0.6622073650360107,
"avg_line_length": 28.399999618530273,
"blob_id": "4a5dfc40f93dd90efdf21d83963adbefed9138ae",
"content_id": "f69e984b29056b67c2842e4421d90d68bbcb3cfd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 299,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 10,
"path": "/models/partner_pokedex_moves.py",
"repo_name": "JoseRobertoMejiaPacheco/partner_pokedex",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nfrom odoo import models, fields, api\n\nclass PokedexMoves(models.Model):\n _name = 'partner_pokedex.move'\n name = fields.Char()\n Power = fields.Char()\n type = fields.Many2one('partner_pokedex.type')\n pokedex_id = fields.Many2one('partner_pokedex.pokedex')\n \n"
},
{
"alpha_fraction": 0.7405063509941101,
"alphanum_fraction": 0.746835470199585,
"avg_line_length": 30.799999237060547,
"blob_id": "73a3c44d8019c96ece6b3c085339ab5f9b5bf182",
"content_id": "08f158f6706cfafdd1756834abb181ba834dc142",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 158,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 5,
"path": "/models/__init__.py",
"repo_name": "JoseRobertoMejiaPacheco/partner_pokedex",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nfrom . import res_partner\nfrom . import partner_pokedex_type\nfrom . import partner_pokedex_moves\nfrom . import partner_pokedex_pokedex"
},
{
"alpha_fraction": 0.8214285969734192,
"alphanum_fraction": 0.8214285969734192,
"avg_line_length": 28,
"blob_id": "2198a0b1573eca0590f099b9613ff2af3f47823a",
"content_id": "416d3edf5cd6826252cff7f3cf93d9032d5d9052",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 28,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 1,
"path": "/wizard/__init__.py",
"repo_name": "JoseRobertoMejiaPacheco/partner_pokedex",
"src_encoding": "UTF-8",
"text": "from . import evolve_pokemon"
},
{
"alpha_fraction": 0.6281920075416565,
"alphanum_fraction": 0.6343207359313965,
"avg_line_length": 28.606060028076172,
"blob_id": "7a25aff6a70231d411d4c4bf5778ce9df00d2a6b",
"content_id": "17d2b3459c202292763d380603dd914b5e08677d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 979,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 33,
"path": "/models/partner_pokedex_pokedex.py",
"repo_name": "JoseRobertoMejiaPacheco/partner_pokedex",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nfrom odoo import models, fields, api\n\n\nclass Pokedex(models.Model):\n _name = 'partner_pokedex.pokedex'\n _parent_store = True\n name = fields.Char()\n height = fields.Float()\n weight = fields.Float()\n description = fields.Text()\n type = fields.Many2many('partner_pokedex.type')\n image = fields.Binary()\n partner_id = fields.Many2one('res.partner')\n\n moves_ids = fields.One2many('partner_pokedex.move', 'pokedex_id')\n parent_left = fields.Integer(index=True)\n parent_right = fields.Integer(index=True)\n parent_id = fields.Many2one(\n 'partner_pokedex.pokedex',\n ondelete='restrict',\n index=True)\n\n child_ids = fields.One2many(\n 'partner_pokedex.pokedex', 'parent_id'\n )\n\n @api.constrains('parent_id')\n def _check_hierarchy(self):\n if not self._check_recursion():\n raise models.ValidationError(\n 'Error! You cannot create recursive categories.')\n\n\n"
},
{
"alpha_fraction": 0.6051344871520996,
"alphanum_fraction": 0.6063569784164429,
"avg_line_length": 39.95000076293945,
"blob_id": "50292c1853247c72ca8b527c83d21b2263e9ff02",
"content_id": "ec69b964ecc81242d86de242ba99eaba8dcd4001",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 818,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 20,
"path": "/controllers/controllers.py",
"repo_name": "JoseRobertoMejiaPacheco/partner_pokedex",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nfrom odoo import http\n\n# class PartnerPokedex(http.Controller):\n# @http.route('/partner_pokedex/partner_pokedex/', auth='public')\n# def index(self, **kw):\n# return \"Hello, world\"\n\n# @http.route('/partner_pokedex/partner_pokedex/objects/', auth='public')\n# def list(self, **kw):\n# return http.request.render('partner_pokedex.listing', {\n# 'root': '/partner_pokedex/partner_pokedex',\n# 'objects': http.request.env['partner_pokedex.partner_pokedex'].search([]),\n# })\n\n# @http.route('/partner_pokedex/partner_pokedex/objects/<model(\"partner_pokedex.partner_pokedex\"):obj>/', auth='public')\n# def object(self, obj, **kw):\n# return http.request.render('partner_pokedex.object', {\n# 'object': obj\n# })"
}
] | 7 |
socrateschieregato/TravelsTest | https://github.com/socrateschieregato/TravelsTest | 5fa01723003ebd3653351d2f9b7800e3d8c924ce | e3a42f4ef996c7f119f94f5f243ed315aadf476f | 27e945ff64236665f77d5f50af15e3788a85227f | refs/heads/master | 2022-12-16T17:57:21.488704 | 2020-09-15T00:58:32 | 2020-09-15T01:04:35 | 295,448,991 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5401034951210022,
"alphanum_fraction": 0.5478654503822327,
"avg_line_length": 26.122806549072266,
"blob_id": "a255f00c9c0eeca6cad0a97a71b819a41b5d2216",
"content_id": "0e6e0c4aebb3196cc668804714f7dad0fd584a60",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1546,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 57,
"path": "/travels/helpers.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "import json\nimport logging\nimport urllib.parse\n\nfrom travels.exceptions import NotFoundException\nfrom travels.urls import urls, Views\n\nlogger = logging.getLogger('backend')\n\n\ndef route_data_or_404(pieces):\n try:\n method, url_data, protocol = pieces[0].strip().split()\n query_params = {}\n if '?' in url_data:\n url_data, query_params_data = url_data.split('?')\n query_params = dict(urllib.parse.parse_qsl(query_params_data))\n\n url = url_data.replace('/', '')\n body = json.loads(pieces[-1].replace('\\n\\t', '')) if pieces[-1] != '' else None\n route_data = {\n 'method': method,\n 'url': url,\n 'body': body,\n 'protocol': protocol,\n 'query_params': query_params if query_params else None\n }\n logger.info(route_data)\n\n return route_data\n except Exception as e:\n raise NotFoundException(f'Error to decode request: {e}')\n\n\ndef get_view(route_data):\n for r in urls:\n if route_data['url'] == r[0]:\n method = getattr(\n Views(route_data),\n r[0],\n lambda: True\n )\n logger.info(f'View: {r[0]}')\n return method()\n\n raise NotFoundException('Page not Found')\n\n\ndef to_json(routes):\n routes_dict = []\n obj = {}\n for route in routes:\n obj['source'] = route[0]\n obj['destination'] = route[1]\n obj['price'] = int(route[2])\n routes_dict.append(obj.copy())\n return routes_dict\n"
},
{
"alpha_fraction": 0.5753424763679504,
"alphanum_fraction": 0.5753424763679504,
"avg_line_length": 15.222222328186035,
"blob_id": "1dcf75ab84a04d085cf0893b2e8db90b33a6044e",
"content_id": "7e340199ee5de748bdc3fee63d4e836b7afa8c94",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 146,
"license_type": "permissive",
"max_line_length": 31,
"num_lines": 9,
"path": "/travels/urls.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "from travels.views import Views\n\nr = Views()\n\nurls = [\n ('routes', r.routes),\n ('new_route', r.new_route),\n ('get_route', r.get_route)\n]\n"
},
{
"alpha_fraction": 0.5639152526855469,
"alphanum_fraction": 0.5639152526855469,
"avg_line_length": 27.52083396911621,
"blob_id": "d3afc1a8e575a8436ea82d7b2a46a3ad2815f4cf",
"content_id": "b9bf991a34ae3ab04ba35a7889607a510810111e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1369,
"license_type": "permissive",
"max_line_length": 68,
"num_lines": 48,
"path": "/travels/views.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "from travels.exceptions import ErrorWriteFile\nfrom travels.script import FindRoute, write_file\nfrom settings import FILE\n\n\nclass Views:\n\n def __init__(self, route_data=None, file_data=None):\n self.data = file_data or FILE\n self.route = FindRoute(file_data=self.data)\n self.route_data = route_data\n\n def routes(self):\n from travels.helpers import to_json\n\n routes = self.route.get_rows_from_file()\n obj = to_json(routes)\n\n return obj\n\n def new_route(self):\n source = self.route_data['body']['source'].upper()\n destination = self.route_data['body']['destination'].upper()\n price = self.route_data['body']['price']\n\n if write_file(self.data, source, destination, price):\n return {\n 'source': source,\n 'destination': destination,\n 'price': price\n }\n raise ErrorWriteFile('Error saving data')\n\n def get_route(self):\n route = FindRoute(\n self.route_data['query_params']['source'].upper(),\n self.route_data['query_params']['destination'].upper(),\n self.data\n )\n route.dijkstra()\n route.best_route()\n\n result = {\n 'route': f\"{route.source} - {' - '.join(route.path)}\",\n 'price': route.price\n }\n\n return result\n"
},
{
"alpha_fraction": 0.5256145000457764,
"alphanum_fraction": 0.5430737137794495,
"avg_line_length": 29.87234115600586,
"blob_id": "243e1582cafab3e136bf97aa0443aea61f373137",
"content_id": "87c6df0c4e5401f5773f73c60399f2ec5e60118e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4353,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 141,
"path": "/tests/test_api.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "import json\nimport unittest\nfrom socket import socket, AF_INET, SOCK_STREAM\n\nfrom travels.script import FindRoute\nfrom settings import FILE, INITIAL_DATA\n\n\ndef remove_last_line():\n with open(FILE, 'w') as f:\n for row in INITIAL_DATA:\n f.writelines(row)\n\n\nclass TestApi(unittest.TestCase):\n\n def setUp(self):\n self.client_socket = socket(AF_INET, SOCK_STREAM)\n\n def tearDown(self):\n self.client_socket.close()\n\n def test_routes(self):\n self.client_socket.connect(('localhost', 8000))\n self.client_socket.send('GET /routes HTTP/1.1\\r\\n\\r\\n'.encode())\n response = (self.client_socket.recv(1024).decode()).strip().split('\\r\\n')\n protocol, status_code, status = response[0].split()\n data = json.loads(response[-1].replace('\\'', '\\\"'))\n\n self.assertEqual(int(status_code), 200)\n self.assertEqual(len(data), 7)\n self.assertEqual(data[0]['source'], 'GRU')\n self.assertEqual(data[-1]['source'], 'SCL')\n\n def test_get_route_gru_cdg(self):\n self.client_socket.connect(('localhost', 8000))\n self.client_socket.send(\n 'GET /get_route?source=gru&destination=CDG HTTP/1.1\\r\\n\\r\\n'.encode()\n )\n response = (self.client_socket.recv(1024).decode()).strip().split('\\r\\n')\n protocol, status_code, status = response[0].split()\n data = json.loads(response[-1].replace('\\'', '\\\"'))\n\n self.assertEqual(data, {'route': 'GRU - BRC - SCL - ORL - CDG', 'price': 40})\n self.assertEqual(int(status_code), 200)\n self.assertEqual(len(data), 2)\n\n def test_get_route_not_in_file(self):\n self.client_socket.connect(('localhost', 8000))\n self.client_socket.send(\n 'GET /get_route?source=ABC&destination=XYZ HTTP/1.1\\r\\n\\r\\n'.encode()\n )\n response = (self.client_socket.recv(1024).decode()).strip().split('\\r\\n')\n protocol, status_code, status = response[0].split()\n data = json.loads(response[-1].replace('\\'', '\\\"'))\n\n self.assertEqual(data, {'detail': 'BAD_REQUEST'})\n self.assertEqual(int(status_code), 400)\n\n def test_new_route(self):\n 
self.client_socket.connect(('localhost', 8000))\n self.client_socket.send(\n 'POST /new_route HTTP/1.1\\r\\n\\r\\n'\n '{\\n\\t\"source\": \"ABC\",\\n\\t\"destination\": \"DEF\",\\n\\t\"price\": 35\\n}'.encode()\n )\n response = (self.client_socket.recv(1024).decode()).strip().split('\\r\\n')\n protocol, status_code, status = response[0].split()\n data = json.loads(response[-1].replace('\\'', '\\\"'))\n\n self.assertEqual(data, {'source': 'ABC', 'destination': 'DEF', 'price': 35})\n self.assertEqual(int(status_code), 201)\n remove_last_line()\n\n def test_get_route_gru_cdg(self):\n route = FindRoute('GRU', 'CDG', FILE)\n route.dijkstra()\n route.best_route()\n\n self.assertEqual(\n route.result,\n {\n 'route': 'GRU - BRC - SCL - ORL - CDG',\n 'price': 40\n }\n )\n\n def test_get_route_brc_cdg(self):\n route = FindRoute('BRC', 'CDG', FILE)\n route.dijkstra()\n route.best_route()\n\n self.assertEqual(\n route.result,\n {\n 'route': 'BRC - SCL - ORL - CDG',\n 'price': 30\n }\n )\n\n def test_get_route_brc_cdg_with_lowercase(self):\n route = FindRoute('brc', 'cdg', FILE)\n route.dijkstra()\n route.best_route()\n\n self.assertEqual(\n route.result,\n {\n 'route': 'BRC - SCL - ORL - CDG',\n 'price': 30\n }\n )\n\n def test_get_route_without_source(self):\n route = FindRoute('', 'cdg', FILE)\n route.dijkstra()\n route.best_route()\n\n self.assertEqual(\n route.result,\n {}\n )\n\n def test_get_route_without_destination(self):\n route = FindRoute('brc', '', FILE)\n route.dijkstra()\n route.best_route()\n\n self.assertEqual(\n route.result,\n {}\n )\n\n def test_get_route_without_file(self):\n route = FindRoute('brc', 'cdg')\n route.dijkstra()\n route.best_route()\n\n self.assertEqual(\n route.result,\n {}\n )\n"
},
{
"alpha_fraction": 0.5349476933479309,
"alphanum_fraction": 0.5426527261734009,
"avg_line_length": 30.877193450927734,
"blob_id": "f697de6644db57274a9970cb74f301113bb5341e",
"content_id": "e3dbfcc9b46a24cce34413f127388db0d6700cd5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3634,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 114,
"path": "/tests/test_script.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "import logging\nimport mock\nimport os\nimport unittest\nfrom getopt import GetoptError\n\nfrom travels.exceptions import ErrorWriteFile\nfrom travels.script import FindRoute, write_file, file_data_console\nfrom settings import FILE\n\nlogger = logging.getLogger('backend')\n\n\nclass TestScript(unittest.TestCase):\n\n def setUp(self):\n self.graph = {\n 'GRU': {\n 'BRC': 10,\n 'CDG': 75,\n 'SCL': 20,\n 'ORL': 56\n },\n 'BRC': {\n 'SCL': 5\n },\n 'SCL': {\n 'ORL': 20\n },\n 'CDG': {},\n 'ORL': {\n 'CDG': 5\n }\n }\n\n def test_get_rows_from_file(self):\n route = FindRoute(file_data=FILE)\n rows = route.get_rows_from_file()\n self.assertEqual(len(rows), 7)\n\n def test_get_rows_from_file_without_file(self):\n route = FindRoute()\n rows = route.get_rows_from_file()\n self.assertEqual(rows, None)\n\n def test_graph_from_file(self):\n route = FindRoute(file_data=FILE)\n graph = route.graph_from_file()\n self.assertEqual(\n graph,\n self.graph\n )\n\n def test_graph_from_file_with_no_exits_routes_graph_should_be_equal(self):\n route = FindRoute('ABC', 'DEF', FILE)\n graph = route.graph_from_file()\n self.assertEqual(graph, self.graph)\n\n def test_dijkstra_calculate_route(self):\n route = FindRoute('GRU', 'CDG', FILE)\n route.dijkstra()\n self.assertEqual(route.unseen_nodes, {})\n self.assertEqual(\n route.shortest_distance,\n {'GRU': 0, 'BRC': 10, 'SCL': 15, 'CDG': 40, 'ORL': 35}\n )\n self.assertEqual(\n route.predecessor,\n {'BRC': 'GRU', 'CDG': 'ORL', 'SCL': 'BRC', 'ORL': 'SCL'}\n )\n\n def test_best_route(self):\n route = FindRoute('GRU', 'CDG', FILE)\n route.dijkstra()\n best_route = route.best_route()\n result_string_expected = (\n f\"best route: {route.source} - {' - '.join(route.path)}\"\n f\" > {route.shortest_distance[route.destination]}\"\n )\n self.assertEqual(best_route, result_string_expected)\n\n def test_best_route_logs(self):\n with self.assertLogs('backend', level='INFO') as log:\n route = FindRoute('GRU', 'CDG', FILE)\n route.dijkstra()\n 
route.best_route()\n\n self.assertIn('best route: GRU - BRC - SCL - ORL - CDG > 40', log.output[0])\n\n def test_write_file(self):\n source, destination, price = ['ABC', 'DEF', 42]\n file = write_file('tests/write-file.csv', source, destination, price)\n\n assert file\n os.remove('tests/write-file.csv')\n\n def test_write_file_should_return_an_error(self):\n source, destination, price = ['ABC', 'DEF', None]\n with self.assertRaises(ErrorWriteFile):\n write_file('tests/write-file.csv', source, destination, price)\n\n def test_file_data_console(self):\n argv = ['tests/input-file-test.csv']\n file = file_data_console(argv)\n self.assertEqual(file, FILE)\n\n @mock.patch('sys.exit')\n def test_file_data_console_without_params(self, mock_sys):\n mock_sys.side_effect = GetoptError\n argv = []\n with self.assertRaises(Exception):\n with self.assertLogs('backend', level='ERROR') as log:\n file_data_console(argv)\n self.assertIn('Example: python script.py <input_file.csv>', log.output[0])\n"
},
{
"alpha_fraction": 0.7362637519836426,
"alphanum_fraction": 0.7362637519836426,
"avg_line_length": 13.833333015441895,
"blob_id": "bd9cb44965cc89b53dbbc4534ab1ed73fc104f05",
"content_id": "bf3f0e94433727d485deafd4617a4f78e26b26fb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 91,
"license_type": "permissive",
"max_line_length": 35,
"num_lines": 6,
"path": "/travels/exceptions.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "\n\nclass NotFoundException(Exception):\n pass\n\n\nclass ErrorWriteFile(Exception):\n pass\n"
},
{
"alpha_fraction": 0.6868464350700378,
"alphanum_fraction": 0.702256441116333,
"avg_line_length": 18.7608699798584,
"blob_id": "bc76eb499e559651946578fe8d5fd0dd49e7d38c",
"content_id": "a605b95ed2077390e76ed7f55ad4561597a4a543",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1817,
"license_type": "permissive",
"max_line_length": 128,
"num_lines": 92,
"path": "/README.md",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "## TravelsTest\nThis software is responsible to retrieve, create show all available routes.\n\n\n\n## Getting Started\nThese instructions will get you a copy of the project up and running on your local machine for development and testing purposes.\n\n## Prerequisites\n\n python version [3.6.8]\n pyenv / virtualenv\n \n## Installation\nHow to install the prerequisites:\n\n`$ sudo apt-get install python3.6.8`\n\n### How to create a virtualenv (using pyenv):\n```\n$ pyenv install [version]\n$ pyenv global [version]\n$ pyenv virtualenv [version] travels_test\n$ pyenv activate travels_test\n```\n\n## Running the tests\n\nTo run the test suite, execute:\n\n`$ python server.py`\n\nYou should see this message: `Server up and running on: http://localhost:8000`\nand on **another** shell instance run:\n \n`(travels_test)$ python -m unittest discover tests -v`\n\n\n\n\n## Running the Application\n\n### On Shell\n\n`$ python script.py input-file.csv`\n\nExample:\n ```shell\n please enter the route: GRU-CDG\n best route: GRU - BRC - SCL - ORL - CDG > $40\n please enter the route: BRC-CDG\n best route: BRC - ORL > $30\n ```\n\n### Running the Application on Insomnia/Postman**\n\nrun first:\n\n`$ python server.py`\n\nThen you should source the environment variables file(to persist on input-file.csv on root):\n\n`(travels_test)$ source .travelsrc`\n\n#### Get - routes\n\nIt will list all routes\n\n`localhost:8000/routes/`\n\n\n\n#### Get - route\n\n`localhost:8000/get_route?source=<source>&destination=<destination>`\n\n\n\n#### Post - new_route\n\nto create a route, just post with the payload above.\n\n`localhost:8000/new_route/`\n```json\n{\n\t\"source\": \"ABC\",\n\t\"destination\": \"DEF\",\n\t\"price\": 35\n}\n```\n\n"
},
{
"alpha_fraction": 0.5574905276298523,
"alphanum_fraction": 0.561062753200531,
"avg_line_length": 32.931819915771484,
"blob_id": "e2c482c1d6b79bca81cbfbb5acfb1e8fdffda9c0",
"content_id": "be0f042893a52c37b3fc6bc8a28c3dfc530c15fa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4479,
"license_type": "permissive",
"max_line_length": 103,
"num_lines": 132,
"path": "/travels/script.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nimport logging\nimport sys\nimport getopt\n\nfrom travels.exceptions import ErrorWriteFile\n\nlogger = logging.getLogger('backend')\n\n\nclass FindRoute:\n\n def __init__(self, source=None, destination=None, file_data=None):\n self.source = source.upper() if source else None\n self.destination = destination.upper() if destination else None\n self.price = 0\n self.data = file_data\n self.graph = self.graph_from_file()\n self.path = []\n self.shortest_distance = {}\n self.predecessor = {}\n self.unseen_nodes = self.graph.copy()\n self.infinity = 999999\n self.result = {}\n\n def get_rows_from_file(self):\n if self.data:\n rows = []\n with open(self.data, 'r') as f:\n reader = f.readlines()\n for row in reader:\n rows.append(row.strip().split(','))\n return rows\n\n def graph_from_file(self):\n rows = self.get_rows_from_file() or []\n graph = {}\n for row in rows:\n source, destination, price = row\n if source not in graph:\n graph[source] = {}\n if not graph.get(destination):\n graph[destination] = {}\n graph[source][destination] = int(price)\n\n return graph\n\n def dijkstra(self):\n for node in self.unseen_nodes:\n self.shortest_distance[node] = self.infinity\n self.shortest_distance[self.source] = 0\n self.calculate_route()\n\n def calculate_route(self):\n while self.unseen_nodes:\n min_node = None\n for node in self.unseen_nodes:\n if min_node is None or self.shortest_distance[node] < self.shortest_distance[min_node]:\n min_node = node\n\n for child_node, price in self.graph[min_node].items():\n if price + self.shortest_distance[min_node] < self.shortest_distance[child_node]:\n self.shortest_distance[child_node] = price + self.shortest_distance[min_node]\n self.predecessor[child_node] = min_node\n\n self.unseen_nodes.pop(min_node)\n\n def best_route(self):\n current_node = self.destination or self.source\n prerequisites = bool(self.source and self.destination and self.data)\n while current_node != self.source:\n try:\n 
self.path.insert(0, current_node)\n current_node = self.predecessor[current_node]\n except KeyError:\n logger.info('Path not reachable')\n break\n if prerequisites and self.shortest_distance[self.destination] != self.infinity:\n self.price = self.shortest_distance[self.destination]\n self.result = {\n 'route': f\"{self.source} - {' - '.join(self.path)}\",\n 'price': (self.shortest_distance[self.destination])\n }\n result_string = (\n f\"best route: {self.source} - {' - '.join(self.path)}\"\n f\" > {self.shortest_distance[self.destination]}\"\n )\n logger.info(result_string)\n return result_string\n\n\ndef file_data_console(argv):\n error_message = 'Example: python script.py <input_file.csv>'\n try:\n _, input_file = getopt.getopt(argv, None)\n if not input_file:\n raise getopt.GetoptError(error_message)\n except getopt.GetoptError:\n logger.error(error_message)\n sys.exit(2)\n\n return input_file[0]\n\n\ndef write_file(file, source, destination, price):\n try:\n logger.info('Starting to save in csv file')\n if bool(source and destination and price):\n new_entry = f'{source},{destination},{str(price)}'\n with open(file, 'a+') as f:\n f.write('\\n')\n f.write(new_entry)\n f.close()\n logger.info('Data saved in csv with success!')\n return True\n else:\n raise ErrorWriteFile\n except Exception:\n raise ErrorWriteFile\n\n\nif __name__ == \"__main__\":\n option = 1\n while option != 0:\n input_data = input('please enter the route: ')\n if not input_data:\n break\n input_data = input_data.upper().split('-')\n file_data = file_data_console(sys.argv[1:])\n route = FindRoute(input_data[0], input_data[1], file_data=file_data)\n route.dijkstra()\n print(route.best_route())\n"
},
{
"alpha_fraction": 0.48600509762763977,
"alphanum_fraction": 0.5241730213165283,
"avg_line_length": 22.117647171020508,
"blob_id": "f64eb516360b2675288713fe06549b2931c65f5b",
"content_id": "14e553ee4d906e6bd5903c325f19e5bc06e767d5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 393,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 17,
"path": "/settings.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "import os\n\nFILE = os.environ.get('FILE', 'tests/input-file-test.csv')\n\nDATA_STRUCTURE = \"HTTP/1.1 {status_code} {status}\\r\\n\" \\\n \"Content-Type: application/json; charset=utf-8\" \\\n \"\\r\\n\\r\\n{body}\\r\\n\\r\\n\"\n\nINITIAL_DATA = [\n 'GRU,BRC,10\\n',\n 'BRC,SCL,5\\n',\n 'GRU,CDG,75\\n',\n 'GRU,SCL,20\\n',\n 'GRU,ORL,56\\n',\n 'ORL,CDG,5\\n',\n 'SCL,ORL,20'\n]\n"
},
{
"alpha_fraction": 0.4438687264919281,
"alphanum_fraction": 0.4594127833843231,
"avg_line_length": 30.29729652404785,
"blob_id": "11d4412cf77fe553ebf4502fb206da7904852803",
"content_id": "0113b2672106fa5bce10f4329a125e9b9899dd8e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2316,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 74,
"path": "/server.py",
"repo_name": "socrateschieregato/TravelsTest",
"src_encoding": "UTF-8",
"text": "import logging\nfrom socket import (\n AF_INET,\n socket,\n SOCK_STREAM,\n SHUT_WR\n)\n\nfrom travels.exceptions import NotFoundException\nfrom travels.helpers import route_data_or_404, get_view\nfrom settings import DATA_STRUCTURE\n\nlogger = logging.getLogger('backend')\n\n\nclass Server:\n\n def create_server(self):\n try:\n server = socket(AF_INET, SOCK_STREAM)\n server.bind(('localhost', 8000))\n server.listen(5)\n\n while True:\n (client_socket, address) = server.accept()\n rd = client_socket.recv(5000).decode()\n pieces = rd.split('\\r\\n')\n if len(pieces) > 0:\n print(pieces[0])\n\n try:\n route_data = route_data_or_404(pieces)\n body = get_view(route_data)\n if body and route_data['method'] == 'POST':\n status_code = 201\n else:\n status_code = 200\n\n data = DATA_STRUCTURE.format(\n status_code=status_code,\n status='OK',\n body=body if body else ''\n )\n client_socket.send(bytes(data, 'utf-8'))\n\n except NotFoundException as e:\n data = DATA_STRUCTURE.format(\n status_code=404,\n status='NOT_FOUND',\n body={'detail': \"NOT_FOUND\"}\n )\n client_socket.send(bytes(data, 'utf-8'))\n logger.error(f\"Erro: {e}\")\n\n except Exception as e:\n data = DATA_STRUCTURE.format(\n status_code=400,\n status='BAD_REQUEST',\n body={'detail': \"BAD_REQUEST\"}\n )\n client_socket.send(bytes(data, 'utf-8'))\n logger.error(f\"Erro: {e}\")\n finally:\n client_socket.shutdown(SHUT_WR)\n\n except KeyboardInterrupt:\n logger.info(\"\\nShutting down...\\n\")\n finally:\n server.close()\n\n\nprint('Server up and running on: http://localhost:8000')\nserver = Server()\nserver.create_server()\n"
}
] | 10 |
bernhardstandl/algodat | https://github.com/bernhardstandl/algodat | 3998cb05f8f9a89bfddc186008d5fd98b7cd14e7 | 9d1c72d88460a846ea07a185058f3a8c41f34c2d | 45f1bec66518f26d83a61fdd8481637c79898bd2 | refs/heads/master | 2023-06-23T11:25:44.766740 | 2023-06-21T04:25:22 | 2023-06-21T04:25:22 | 205,582,380 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5674300193786621,
"alphanum_fraction": 0.605597972869873,
"avg_line_length": 31.75,
"blob_id": "fbdf0c1edf27828d1ab7c839536ed1bac282b0fc",
"content_id": "82b8e813dd2f8f1cabc37cfe40698151be895c00",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 395,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 12,
"path": "/BubbleSort.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "Liste = [10,9,8,7,6,5,4,3,2,1] #Liste erstellen\n\nN = len(Liste) #Länge der Liste\nfor i in range(N): #Schleife außen\n for j in range(N-1): #Schleife innen\n if(Liste[j+1]<Liste[j]): ## Logische Bedingung\n tmp = Liste[j+1] #kleinerer Wert in tmp\n Liste[j+1]= Liste[j] #rechts wird links\n Liste[j] = tmp #links wird tmp\n\n\nprint(Liste) #Ergebnis Programm\n"
},
{
"alpha_fraction": 0.648829460144043,
"alphanum_fraction": 0.6939799189567566,
"avg_line_length": 25,
"blob_id": "19e01b0ab79fc7768e99c05e52b36a6fe674b943",
"content_id": "2dd782e34d4d1a3395429250546059fbdd0d07a4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 598,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 23,
"path": "/Liste_Array_Zeitmessung.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "#Zeitmessung#\n#####Algorithmen und Datenstrukturen #####\nimport time\nimport random\nimport numpy as np\n\n#Liste\nstart = time.time() #Stoppuhr Start\n##### Programm Start #####\nrandom = random.sample(range(1, 1000),999)\nende = time.time() #Stoppuhr Ende\ngesamt = ende-start ##Differenz Start/Stopp\nprint(round(gesamt*1000,2),\"ms (Liste)\")\n\n\n#Array\nstart = time.time() #Stoppuhr Start\n##### Programm Start #####\nrandom = np.random.randint(range(1,1000),1000)\n##### Programm Ende ##### \nende = time.time() #Stoppuhr Ende\ngesamt = ende-start ##Differenz Start/Stopp\nprint(round(gesamt*1000,2),\"ms Array\")\n"
},
{
"alpha_fraction": 0.6358974575996399,
"alphanum_fraction": 0.6897435784339905,
"avg_line_length": 23.375,
"blob_id": "16cc16bfccca5fc0c37c466de64f21bf7049c8d4",
"content_id": "691487f5bf4ec38ea2154c9d67d260b98a271e15",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 390,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 16,
"path": "/Zeitmessung.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "#Zeitmessung#\n#####Algorithmen und Datenstrukturen #####\nimport time\nimport random\nimport numpy as np\n\nstart = time.time() #Stoppuhr Start\n##### Programm Start #####\n#random = random.sample(range(1, 1000), 500)\nrandom = np.random.randint(1,1000,500)\n##### Programm Ende ##### \n\nende = time.time() #Stoppuhr Ende\ngesamt = ende-start ##Differenz Start/Stopp\n\nprint(round(gesamt*1000,2),\"ms\")\n"
},
{
"alpha_fraction": 0.813017725944519,
"alphanum_fraction": 0.8142011761665344,
"avg_line_length": 39.238094329833984,
"blob_id": "948f20437c5518c3d08a94b4df326baf4d74c278",
"content_id": "40b9c9a394031ab091c2ac530ea6af86438abfbf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 846,
"license_type": "no_license",
"max_line_length": 187,
"num_lines": 21,
"path": "/README.md",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "# Algorithmen und Datenstrukturen (PH Karlsruhe, Informatik Lehramt)\nBernhard Standl\n\nIn diesem Repository werden Python Snippets zur Lehrveranstaltung Algorithmen und Datenstrukturen gesammelt und hier her gespiegelt: https://mybinder.org/v2/gh/bernhardstandl/algodat/HEAD\n\n## Snippets\n- Aufgabe Schleifen (Aufgabe_Schleifen.py): Lösung der drei Aufgaben mit wachsender Schwierigkeit\n\n## Sortieralgorithmen\n- Bubblesort (BubbleSort.py)\n- SelectionSort (SelectionSort.py)\n\n## Zeitmessung\n- Vorlage (Zeitmessung.py)\n- Bubblesort (Zeitmessung_BubbleSort.py)\n- Visualisierung Bubble Sort (Zeitmessung_Visualisierung_Bubble.py)\n- Visualisierung Bubble Sort / Selection (Zeitmessung_Visualisierung_BubbleSelection.py)\n\n## Datenstrukturen\n- Listen/Arrays mit Zeitmessung (Liste_Array_Zeitmessung.py)\n- Operationen auf Listen/Arrays (ArrayListe_Operationen.py)\n"
},
{
"alpha_fraction": 0.5751824975013733,
"alphanum_fraction": 0.6014598608016968,
"avg_line_length": 33.150001525878906,
"blob_id": "cc2500599c5695749a732c1986f7f4cc97a34965",
"content_id": "fccaf48d565d8542f150c81df3d8b8f09ce8a98c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 691,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 20,
"path": "/BubbleSort_comment.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "Liste = [10,9,8,7,6,5,4,3,2,1] #Liste erstellen\n\nN = len(Liste) #Länge der Liste\nprint(\"Eingabe Liste=\",Liste)\nprint(\"Liste Länge=\",N)\n\nfor i in range(N): #Schleife außen\n \n #Schleife innen N=länge des Arrays, -i=Verkürzung des Bereichs in jedem Durchlauf, -1=vorletztes+letztes vergleichen\n \n print(\"\\nDurchlauf\",i,\"Länge=\",N-i-1)\n for j in range(N-i-1): \n if(Liste[j+1]<Liste[j]): ## Logische Bedingung\n \n tmp = Liste[j+1] #kleinerer Wert in tmp\n Liste[j+1]= Liste[j] #rechts wird links\n Liste[j] = tmp #links wird tmp\n print(j+1,\"Liste in Durchlauf\", i,Liste) \n\nprint(Liste) #Ergebnis Programm\n\n\n"
},
{
"alpha_fraction": 0.6050724387168884,
"alphanum_fraction": 0.6557971239089966,
"avg_line_length": 18.714284896850586,
"blob_id": "69e3b14602f986fa1aa9dc75445037992543e10e",
"content_id": "06dd28290e721cb5c5f8f67fc004bb2e1b1ff22a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 276,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 14,
"path": "/Klasse.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "class RechnenMitZweiZahlen:\n def __init__(self, z1, z2):\n self.z1 = z1\n self.z2 = z2\n\n def multiplikation(self):\n print(self.z1 * self.z2)\n \n def addition(self):\n print(self.z1 + self.z2)\n\nobj = RechnenMitZweiZahlen(10, 36)\nobj.multiplikation()\nobj.addition()\n"
},
{
"alpha_fraction": 0.6250967979431152,
"alphanum_fraction": 0.651432991027832,
"avg_line_length": 29.738094329833984,
"blob_id": "42bafe73320bf185e0c1d581c6bc9b2196facb09",
"content_id": "7622adf0d23811d5698106b92ee5946fff185462",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1294,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 42,
"path": "/zeitmessung_frame.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "import time #Zeitmessung\nimport random #Zufallszahlen\nimport numpy as np #Array\nfrom llist import dllist #Linked List\nimport matplotlib.pyplot as plt\n\nzeit1 = [] #Liste 1 für die Zeitmessungen\nzeit2 = [] #Liste 2 für die Zeitmessungen\nzeit3 = [] #Liste 3 für die Zeitmessungen\n\nfor i in range(10,200): #Schleife zur iterativen Erzeugung der Anzahl \n start = time.time() #Stoppuhr Start\n ##### Programm Start #####\n # Hier kommt das Programm 1\n ##### Programm Ende ##### \n ende = time.time() #Stoppuhr Ende\n gesamt = ende-start ##Differenz Start/Stopp\n zeit1.append(gesamt)\n \n start = time.time() #Stoppuhr Start\n ##### Programm Start #####\n # Hier kommt das Programm 2\n ##### Programm Ende ##### \n ende = time.time() #Stoppuhr Ende\n gesamt = ende-start ##Differenz Start/Stopp\n zeit2.append(gesamt)\n \n start = time.time() #Stoppuhr Start\n ##### Programm Start #####\n # Hier kommt das Programm 3\n ##### Programm Ende ##### \n ende = time.time() #Stoppuhr Ende\n gesamt = ende-start ##Differenz Start/Stopp\n zeit3.append(gesamt)\n \n\n\nplt.plot(zeit1,'r') #Plot von Programm 1\nplt.plot(zeit2,'g') #Plot von Programm 2\nplt.plot(zeit3,'b') #Plot von Programm 3\nplt.axis([0, 1000, 0, 0.0006]) #Skalierung der Achsen\nplt.show()\n"
},
{
"alpha_fraction": 0.5937161445617676,
"alphanum_fraction": 0.6143012046813965,
"avg_line_length": 29.766666412353516,
"blob_id": "15084c9dad98acdd95272175faa2251490984b7c",
"content_id": "dfaf6f83bb7b6cf188cd84e58e2e01ed7797ee72",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 927,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 30,
"path": "/Zeitmessung_Visualisierung_Bubble.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "#Zeitmessung Bubble Sort mit Visualisierung#\n#####Algorithmen und Datenstrukturen #####\nimport time\nimport random\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nzeit = [] #Liste für die Messungen\n\nfor i in range(10,200):\n Liste = np.random.randint(low = 0, high = 1000, size = i) #Liste mit Werten von 1-1000 mit der Länge i\n \n start = time.time() #Stoppuhr Start\n \n ##### Programm Start #####\n N = len(Liste) #Länge der Liste\n for i in range(N): #Schleife außen\n for j in range(N-1): #Schleife innen\n if(Liste[j+1]<Liste[j]): ## Logische Bedingung\n tmp = Liste[j+1] #kleinerer Wert in tmp\n Liste[j+1] = Liste[j] #rechts wird links\n Liste[j] = tmp #links wird tmp\n ##### Programm Ende ##### \n\n ende = time.time() #Stoppuhr Ende\n gesamt = ende-start ##Differenz Start/Stopp\n zeit.append(gesamt)\n\nplt.plot(zeit) \nplt.show()\n"
},
{
"alpha_fraction": 0.5768321752548218,
"alphanum_fraction": 0.6052009463310242,
"avg_line_length": 31.538461685180664,
"blob_id": "887719f7a50b0bd2bdcf3a543023a64328575e99",
"content_id": "53289515dd9ee87b94a12c21317fdb49da69f465",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 423,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 13,
"path": "/SelectionSort.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "Liste = [10,9,8,7,6,5,4,3,2,1]\n \nfor i in range(len(Liste)): #Alle Elemente der Liste durchlaufen\n minimum = i #Suche nach dem kleinsten Element in der Liste\n for j in range(i+1, len(Liste)):\n if Liste[minimum] > Liste[j]:\n minimum = j\n \n tmp = Liste[i] #Tausche kleinestes Element an die richtige Stelle\n Liste[i] = Liste[minimum]\n Liste[minimum] = tmp\n \nprint(Liste)\n"
},
{
"alpha_fraction": 0.6060402393341064,
"alphanum_fraction": 0.6174496412277222,
"avg_line_length": 31.844444274902344,
"blob_id": "5e53b4fbb52ac4b32cc5f865dfb3b1f7f25a9d2f",
"content_id": "56b9eb98d41d9cda5aa9fb9b034af021845d4070",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1493,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 45,
"path": "/Binary_Search.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "### Quelle: https://www.geeksforgeeks.org/python-program-for-binary-search/\n\ndef binary_search(arr, low, high, x):\n \n # Start der Suche\n if high >= low:\n \n #Finden der Mitte der (Teil) Liste (doppelter Slash = Integer)\n mid = (high + low) // 2\n \n # Falls die gefundene Mitte direkt das gesuchte Element ist\n if arr[mid] == x:\n return mid\n \n #Wenn das gesuchte Element kleiner als der Wert in der Mitte der Liste ist\n #dann suche es links\n elif arr[mid] > x:\n #Rufe die Funktion mit den neuen Start und Ende Stellen zur Suche auf\n #low bleibt (weil links) und das Ende ist die Position vor der Mitte\n return binary_search(arr, low, mid - 1, x)\n \n #Wenn das gesuchte Element größer als der Wert in der Mitte der Liste ist\n #dann suche es rechts\n else:\n #Rufe die Funktion mit den neuen Start und Ende Stellen zur Suche auf\n #low ist das Element rechts der Mitte und das Ende bleibt\n return binary_search(arr, mid + 1, high, x)\n \n else:\n #Falls es kein Ergebnis gibt, gib -1 zurück\n return -1\n \n\n#Testliste\narr = [ 2, 3, 4, 10, 40 ]\n#Gesuchter Wert\nx = 10\n \n# Aufruf der Funktion mit den Startwerten\nresult = binary_search(arr, 0, len(arr)-1, x)\n \nif result != -1:\n print(\"Das gesuchte Element befindet sich hier: \", str(result))\nelse:\n print(\"Das gesuchte Element konnte nicht gefunden werden!\")\n \n \n \n"
},
{
"alpha_fraction": 0.5990037322044373,
"alphanum_fraction": 0.6220423579216003,
"avg_line_length": 21.619718551635742,
"blob_id": "f16aa1eed0130b942e1252daefc897a56b9e798e",
"content_id": "0cea0a7143d32cceed86d443ed7e981821b01926",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1611,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 71,
"path": "/insertion_quick.md",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "Quick-Sort\n```python\n#Zeitmessung Quick Sort mit Visualisierung#\n#####Algorithmen und Datenstrukturen #####\nimport time\nimport random\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nzeit = [] #Liste für die Messungen\n\ndef quicksort(arr):\n if len(arr) <= 1:\n return arr\n else:\n pivot = arr[0]\n less = [x for x in arr[1:] if x <= pivot]\n greater = [x for x in arr[1:] if x > pivot]\n return quicksort(less) + [pivot] + quicksort(greater)\n\n\nfor i in range(10,1000):\n Liste = np.random.randint(low = 0, high = 1000, size = i) #Liste mit Werten von 1-1000 mit der Länge i\n\n \n start = time.time() #Stoppuhr Start\n quicksort(Liste) #Ausführung Quicksort\n ende = time.time() #Stoppuhr Ende\n \n gesamt = ende-start ##Differenz Start/Stopp\n zeit.append(gesamt)\n\n\nplt.plot(zeit) \nplt.show()\n```\n\nInsertion-Sort\n```python\n#Zeitmessung Insertion Sort mit Visualisierung#\n#####Algorithmen und Datenstrukturen #####\nimport time\nimport random\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nzeit = [] #Liste für die Messungen\n\n\nfor a in range(10,500):\n Liste = np.random.randint(low = 0, high = 1, size = a) #Liste mit Werten von 1-1000 mit der Länge i\n\n \n start = time.time() #Stoppuhr Start\n\n for i in range(1, len(Liste)):\n key = Liste[i]\n for j in range(i - 1, -1, -1):\n if Liste[j] > key:\n Liste[j + 1] = Liste[j]\n Liste[j] = key\n\n\n ende = time.time() #Stoppuhr Ende\n \n gesamt = ende-start ##Differenz Start/Stopp\n zeit.append(gesamt)\n\nplt.plot(zeit) \nplt.show()\n```\n"
},
{
"alpha_fraction": 0.6649746298789978,
"alphanum_fraction": 0.7258883118629456,
"avg_line_length": 19.736841201782227,
"blob_id": "3a8ae5b707a30282c4debc23e3c059cf8bae2d13",
"content_id": "37f1e0c8ec18562b23b62e0a98b8bd79aa6ed1b0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 394,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 19,
"path": "/listen_arrays_verkettete.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "from llist import sllist\nimport numpy as np\n\n\n# Standard Liste in Python (Dynamisches Array)\ndynamisches_array = []\nfor _ in range(1000):\n dynamisches_array.append(random.randint(1, 100))\n\n\n\n# Verkettte Liste mit dem Modul LLIST\nverkettete_liste = sllist()\nfor _ in range(1000):\n verkettete_liste.append(random.randint(1, 100))\n\n\n# Array mit Numpy\narray = np.random.randint(1, 100, 1000)\n"
},
{
"alpha_fraction": 0.485049843788147,
"alphanum_fraction": 0.5614618062973022,
"avg_line_length": 17.8125,
"blob_id": "b003d3a477a20f53bbd0829c4522473aad16d5b2",
"content_id": "21428e33080e1758f9d6f59302407a8d9eb9e18a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 301,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 16,
"path": "/Aufgabe_Schleifen.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "print(\"Aufgabe 1\")\n#Aufgabe 1\nfor i in range(1,20):\n print(i,end=\",\")\n\nprint(\"\\n\\nAufgabe 2\")\n#Aufgabe 2\nfor i in range(1,20):\n if((i%2)==0):\n print(i,end=\",\")\n \nprint(\"\\n\\nAufgabe 3\")\n#Aufgabe 3\nfor i in range(1,20):\n if(((i%2)==0) and (i%3)==0):\n print(i, \" ist durch 2 und 3 teilbar.\")\n"
},
{
"alpha_fraction": 0.6017786264419556,
"alphanum_fraction": 0.6284584999084473,
"avg_line_length": 19.653060913085938,
"blob_id": "d4b1dc3224bcae440753f7d36b40cab2cca4ecc8",
"content_id": "9abfccd71ae37dabb5767802437fadbaa37ebaad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1012,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 49,
"path": "/datenstrukturen_zeit.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "import time\nfrom llist import sllist\nimport numpy as np\n\n\nzeit_darray = []\nzeit_vliste = []\nzeit_array = []\n\nvon = 1\nbis = 100\n\nfor i in range(1000):\n # Creating list using Python default list\n start_time = time.time()\n\n my_list = []\n for _ in range(i):\n my_list.append(random.randint(von, bis))\n\n end_time = time.time()\n list_time = end_time - start_time\n zeit_darray.append(list_time)\n\n # Creating list using sllist module\n start_time = time.time()\n\n my_llist = sllist()\n for _ in range(i):\n my_llist.append(random.randint(1, 100))\n\n end_time = time.time()\n llist_time = end_time - start_time\n zeit_vliste.append(llist_time)\n\n # Creating array using NumPy\n start_time = time.time()\n\n my_array = np.random.randint(1, 100, i)\n\n end_time = time.time()\n array_time = end_time - start_time\n zeit_array.append(array_time)\n\nplt.plot(zeit_darray,'r')\nplt.plot(zeit_vliste,'g')\nplt.plot(zeit_array,'b')\nplt.axis([0, 1000, 0, 0.0006])\nplt.show()\n"
},
{
"alpha_fraction": 0.6517967581748962,
"alphanum_fraction": 0.6939281225204468,
"avg_line_length": 15.770833015441895,
"blob_id": "e953b3d09bf5bb2ef2eb934f4c2c7a4cb393894f",
"content_id": "d522a7411d840ffd98f41c6c176a0718b8785843",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 815,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 48,
"path": "/ArrayListe_Operationen.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "#Operationen auf Listen/Arrays#\n##### Algorithmen und Datenstrukturen #####\nimport numpy as np\n\n######## Liste ########\nListe = [1,2,3,4,5,6]\nprint(Liste)\n\n#Hinzufügen ans Ende\nListe.append(7)\nprint(Liste)\n\n#Hinzufügen an Stelle x\nListe.insert(1,10)\nprint(Liste)\n\n#Löschen von Eintrag mit Wert x\nListe.remove(5)\nprint(Liste)\n\n#Löschen von Eintrag an Stelle x\nListe.pop(0)\nprint(Liste)\n\n\n\n######## Array ########\nArray = np.array([1,2,3,4,5])\n\n#Hinzufügen ans Ende\nArray1 = np.append(Array,[10,11])\nprint(Array)\nprint(Array1)\n\n#Hinzufügen an Stelle x\nArray2 = np.insert(Array,1,20)\nprint(Array)\nprint(Array2)\n\n#Löschen von Eintrag mit Wert x\nprint(Array)\nArray4 = np.delete(Array, np.where(Array == 4))\nprint(Array4)\n\n#Löschen von Eintrag an Stelle x\nArray3 = np.delete(Array,1)\nprint(Array)\nprint(Array3)\n\n\n"
},
{
"alpha_fraction": 0.5898481011390686,
"alphanum_fraction": 0.6117080450057983,
"avg_line_length": 18.77941131591797,
"blob_id": "35091376fb4b656b46226ac0c28e80b17b09cc42",
"content_id": "573ce05ebcc818186f31f7cbc413aace1d599a57",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2699,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 136,
"path": "/lineareSuche.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "#Einfache lineare Suche\n\nliste = [3,5,1,10,8,7]\nsuche = 10\n\nfor i in range(len(liste)):\n if liste[i] == suche:\n print(\"gefunden bei\",i)\n break\n\n\n#Lineare Suche mit Funktion\ndef lineareSuche(gesucht,Liste): \n for i in range(len(liste)):\n if liste[i] == suche:\n print(\"gefunden bei\",i)\n break\n \nListe = [3,5,1,10,8,7]\nsuche = 3\nlineareSuche(Liste,suche)\n\n\n\n#Lineare Suche mit Funktion und Random Liste\nimport random\n\ndef lineareSuche(gesucht,liste): \n for i in range(len(liste)):\n if liste[i] == suche:\n print(\"gefunden bei\",i)\n break\n \nliste = random.sample(range(0, 11),11)\nprint(liste)\n\nsuche = 10\nlineareSuche(suche,liste)\n\n\n\n#Lineare Suche mit Funktion und wachsender Random Liste\nimport random\n\ndef lineareSuche(gesucht,liste): \n for i in range(len(liste)):\n if liste[i] == suche:\n print(\"gefunden bei\",i)\n break\n\nfor i in range(100):\n liste = random.sample(range(0, i),i)\n suche = 10\n lineareSuche(suche,liste)\n \n \n \n \n#Lineare Suche mit Funktion und wachsender Random Liste und Zeitmessung\nimport random\nimport time\n\ndef lineareSuche(gesucht,liste): \n for i in range(len(liste)):\n if liste[i] == suche:\n print(\"gefunden bei\",i)\n break\n\nfor i in range(100):\n liste = random.sample(range(0, i),i)\n \n suche = 10\n \n start = time.time() #Stoppuhr Start\n lineareSuche(suche,liste)\n ende = time.time() #Stoppuhr Ende\n \n gesamt = ende-start ##Differenz Start/Stopp\n print(round(gesamt*1000,2),\"ms\")\n \n\n\n#Lineare Suche mit Funktion und wachsender Random Liste und Zeitmessung und Visualisierung\nimport random\nimport time\nimport matplotlib.pyplot as plt\n\nzeit = []\n\ndef lineareSuche(gesucht,liste): \n for i in range(len(liste)):\n if liste[i] == suche:\n #print(\"gefunden bei\",i)\n break\n\nfor i in range(800):\n liste = random.sample(range(0, i),i)\n \n suche = 10\n \n start = time.time() #Stoppuhr Start\n lineareSuche(suche,liste)\n ende = time.time() #Stoppuhr Ende\n \n gesamt = 
ende-start ##Differenz Start/Stopp\n zeit.append(gesamt)\n \nplt.plot(zeit,'g')\nplt.show()\n\n\n\n#Variante lineare Suche rekursiv\n\nliste = [3,5,1,10,8,7]\nsuche = 1\n\nstart = 0\nende = len(liste)-1\n\n#print(liste[start])\n#print(liste[ende])\n\n\ndef linearNeu(liste,start,ende):\n if liste[start] == suche:\n print(\"gefunden bei\", start)\n return\n if liste[ende] == suche:\n print(\"gefunden bei\", ende)\n return\n start = start+1\n ende = ende-1\n linearNeu(liste,start,ende)\n \nlinearNeu(liste,start,ende)\n \n"
},
{
"alpha_fraction": 0.5987791419029236,
"alphanum_fraction": 0.6182019710540771,
"avg_line_length": 33,
"blob_id": "eb7f8a1234fbf04a52e0e90d812df3326552a628",
"content_id": "7d1c62fdb105246589dc0e84d40775497e33381d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1808,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 53,
"path": "/Zeitmessung_Visualisierung_BubbleSelection.py",
"repo_name": "bernhardstandl/algodat",
"src_encoding": "UTF-8",
"text": "#Zeitmessung Bubble Sort mit Visualisierung#\n#####Algorithmen und Datenstrukturen #####\nimport time\nimport random\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nzeit_bubble = [] #Liste für die Messungen Bubble Sort\nzeit_selection = [] #Liste für die Messungen Selection Sort\n\nfor i in range(10,200):\n Liste = np.random.randint(low = 0, high = 1000, size = i) #Liste mit Werten von 1-1000 mit der Länge i\n \n start = time.time() #Stoppuhr Start\n \n ##### Programm Start #####\n N = len(Liste) #Länge der Liste\n for i in range(N): #Schleife außen\n for j in range(N-1): #Schleife innen\n if(Liste[j+1]<Liste[j]): ## Logische Bedingung\n tmp = Liste[j+1] #kleinerer Wert in tmp\n Liste[j+1] = Liste[j] #rechts wird links\n Liste[j] = tmp #links wird tmp\n ##### Programm Ende ##### \n\n ende = time.time() #Stoppuhr Ende\n gesamt = ende-start ##Differenz Start/Stopp\n zeit_bubble.append(gesamt)\n \nfor i in range(10,200):\n Liste = np.random.randint(low = 0, high = 1000, size = i) #Liste mit Werten von 1-1000 mit der Länge i\n \n start = time.time() #Stoppuhr Start\n \n ##### Programm Start #####\n for i in range(len(Liste)): #Alle Elemente der Liste durchlaufen\n minimum = i #Suche nach dem kleinsten Element in der Liste\n for j in range(i+1, len(Liste)):\n if Liste[minimum] > Liste[j]:\n minimum = j\n\n tmp = Liste[i] #Tausche kleinestes Element an die richtige Stelle\n Liste[i] = Liste[minimum]\n Liste[minimum] = tmp\n ##### Programm Ende ##### \n\n ende = time.time() #Stoppuhr Ende\n gesamt = ende-start ##Differenz Start/Stopp\n zeit_selection.append(gesamt)\n\nplt.plot(zeit_bubble,'r')\nplt.plot(zeit_selection,'g')\nplt.show()\n"
}
] | 17 |
ozhar1248/trivia_game | https://github.com/ozhar1248/trivia_game | 5a2b07714b9411fe852df770bbfce0166475a42d | 4cdb2c979755e7de68c5ff2894f6ac39d995da3e | 0863e2d044575319ebb7969bf2a337ee508ea6f0 | refs/heads/main | 2023-01-29T11:09:28.424398 | 2020-12-14T12:27:30 | 2020-12-14T12:27:30 | 321,339,767 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.581477165222168,
"alphanum_fraction": 0.5873388051986694,
"avg_line_length": 36.772727966308594,
"blob_id": "5288cd73106cacf1899fba1230e9cbe2763fd1fc",
"content_id": "a1ff9dadb0432fb54d1ed25ef5ef1d0b75f8e9a7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 853,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 22,
"path": "/quiz_brain.py",
"repo_name": "ozhar1248/trivia_game",
"src_encoding": "UTF-8",
"text": "class QuizBrain:\r\n def __init__(self, bank):\r\n self.question_bank = bank\r\n self.question_number = 0\r\n self.score = 0\r\n\r\n def next_question(self):\r\n ans = input(f\"Q.{self.question_number+1}: {self.question_bank[self.question_number].question} (True / False): \")\r\n self.check_answer(ans, self.question_bank[self.question_number].answer)\r\n self.question_number += 1\r\n\r\n def has_questions(self):\r\n return self.question_number < len(self.question_bank)\r\n\r\n def check_answer(self, user_ans, correct_ans):\r\n if user_ans.lower() == correct_ans.lower():\r\n print(\"Right!\")\r\n self.score += 1\r\n else:\r\n print(\"Wrong!\")\r\n print(f\"The correct answer is {correct_ans}\")\r\n print(f\"Your current score is {self.score}/{len(self.question_bank)}\\n\")\r\n"
},
{
"alpha_fraction": 0.7023255825042725,
"alphanum_fraction": 0.7116279006004333,
"avg_line_length": 26.66666603088379,
"blob_id": "02f8611eb215c8f2339766de9b63f1dbf6c0dda3",
"content_id": "d4a02b4bd986146e54ba6361b13588fc64173b98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 430,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 15,
"path": "/main.py",
"repo_name": "ozhar1248/trivia_game",
"src_encoding": "UTF-8",
"text": "from question import Question\r\nfrom data import question_data\r\nfrom quiz_brain import QuizBrain\r\n\r\nquestion_bank = []\r\nfor item in question_data:\r\n question_bank.append(Question(item[\"text\"], item[\"answer\"]))\r\n\r\nquiz = QuizBrain(question_bank)\r\n\r\nwhile quiz.has_questions():\r\n quiz.next_question()\r\n\r\ngrade = round(quiz.score / len(question_bank) * 100, 2)\r\nprint(f\"You've completed the quiz\\nYour final score is {grade}\")\r\n"
}
] | 2 |
Eiyeron/telegram-bot-api | https://github.com/Eiyeron/telegram-bot-api | 143c2278584e016df12592f355914f5a0defaf75 | fe1b765f418deaa22781d8fa345c99f44911b807 | d6bd80851680957771dbb00dde482c8d10249e7f | refs/heads/master | 2020-05-16T21:25:44.661068 | 2016-12-15T13:06:32 | 2016-12-15T13:06:32 | 38,127,095 | 22 | 5 | null | 2015-06-26T18:28:41 | 2015-07-20T07:21:40 | 2015-07-20T15:06:19 | Python | [
{
"alpha_fraction": 0.5344458818435669,
"alphanum_fraction": 0.536157488822937,
"avg_line_length": 28.031055450439453,
"blob_id": "a9569634e75e3559ab6337d478aafeed48e48476",
"content_id": "884e354eecb0c6bed4939dbc79e6b710d05d2169",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4674,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 161,
"path": "/models.py",
"repo_name": "Eiyeron/telegram-bot-api",
"src_encoding": "UTF-8",
"text": "# Using __dict__ and *args for compulsory args and **kwargs for optional ones.\n\n\nclass User(object):\n def __init__(self, *args):\n try:\n self.__dict__ = args[0]\n except:\n pass\n\n\nclass GroupChat(object):\n def __init__(self, *args):\n try:\n self.__dict__ = args[0]\n except:\n pass\n\n\n# Todo? : Inheritance and create a File superclass\n# for all file-related classes?\nclass PhotoSize:\n def __init__(self, data):\n if not data:\n return\n self.file_id = data[\"file_id\"]\n self.width = data[\"width\"]\n self.height = data[\"height\"]\n self.file_size = data.get(\"file_size\", -1)\n\n\nclass Audio:\n def __init__(self, data):\n self.file_id = data[\"file_id\"]\n self.duration = data[\"duration\"]\n self.mime_type = data[\"mime_type\"]\n self.file_size = data.get(\"file_size\", -1)\n\n\nclass Document:\n def __init__(self, data):\n self.file_id = data[\"file_id\"]\n if 'thumb' in data:\n self.thumb = PhotoSize(data[\"thumb\"])\n self.file_name = data.get(\"file_name\", \"\")\n self.mime_type = data.get(\"mime_type\", \"\")\n self.file_size = data.get(\"file_size\", -1)\n\n\nclass Sticker:\n def __init__(self, data):\n self.file_id = data[\"file_id\"]\n self.width = data[\"width\"]\n self.height = data[\"height\"]\n if 'thumb' in data:\n self.thumb = PhotoSize(data[\"thumb\"])\n self.file_size = data.get(\"file_size\", -1)\n\n\nclass Video:\n def __init__(self, data):\n self.file_id = data[\"file_id\"]\n self.width = data[\"width\"]\n self.height = data[\"height\"]\n self.duration = data[\"duration\"]\n if 'thumb' in data:\n self.thumb = PhotoSize(data[\"thumb\"])\n self.mime_type = data.get(\"mime_type\", \"\")\n self.file_size = data.get(\"file_size\", -1)\n self.caption = data.get(\"caption\", \"\")\n\n\nclass Contact:\n def __init__(self, data):\n self.phone_number = data[\"phone_number\"]\n self.first_name = data[\"first_name\"]\n self.last_name = data.get(\"last_name\", \"\")\n self.user_id = data.get(\"user_id\", \"\")\n\n\nclass Location:\n 
def __init__(self, data):\n self.longitude = data[\"longitude\"]\n self.latitude = data[\"latitude\"]\n\n\nclass UserProfilePhotos:\n def __init__(self, data):\n self.total_count = data[\"total_count\"]\n self.photos = []\n for row in data[\"photos\"]:\n self.photos.append(list(row))\n\n\nclass ReplyKeyBoard(object):\n def __init__(self, **kwargs):\n self.selective = kwargs.get('selective', False)\n\n\nclass ReplyKeyboardMarkup(ReplyKeyBoard):\n\n def __init__(self, keyboard, **kwargs):\n ReplyKeyBoard.__init__(self, **kwargs)\n self.keyboard = keyboard\n self.reisze_keyboard = kwargs.get(\"resize_keyboard\", False)\n self.one_time_keyboard = kwargs.get(\"one_time_keyboard\", False)\n\n\nclass ReplyKeyboardHide(ReplyKeyBoard):\n\n def __init__(self, **kwargs):\n ReplyKeyBoard.__init__(self, **kwargs)\n self.hide_keyboard = True\n\n\nclass ForceReply(ReplyKeyBoard):\n\n def __init__(self, **kwargs):\n ReplyKeyBoard.__init__(self, **kwargs)\n self.force_reply = True\n\n\nreplace_dict = {'forward_from': User,\n 'audio': Audio,\n 'document': Document,\n 'sticker': Sticker,\n 'video': Video,\n 'contact': Contact,\n 'location': Location,\n 'new_chat_participant': User,\n 'left_chat_participant': User\n }\n\n\nclass Message(object):\n def __init__(self, *args):\n message_dict = {}\n\n for attr, attr_value in args[0].items():\n\n if attr == 'from':\n message_dict['from_user'] = User(attr_value)\n elif attr == 'chat':\n # Finding if we have a GroupChat or an User\n if 'first_name' in attr_value:\n message_dict[attr] = User(attr_value)\n elif 'title' in attr_value:\n message_dict[attr] = GroupChat(attr_value)\n elif attr in replace_dict:\n message_dict[attr] = replace_dict[attr](attr_value)\n elif attr == \"reply_to_message\":\n message_dict[attr] = Message(attr_value)\n elif attr in (\"photo\", \"new_chat_photo\"):\n photos = []\n for photo in attr_value:\n photos.append(PhotoSize(photo))\n message_dict[attr] = photos\n else:\n message_dict[attr] = attr_value\n\n 
self.__dict__ = message_dict\n"
},
{
"alpha_fraction": 0.7512690424919128,
"alphanum_fraction": 0.7524404525756836,
"avg_line_length": 50.220001220703125,
"blob_id": "33bf13422238889e8d91be297ff962aa80115b60",
"content_id": "8c5f4f84cb925c2166111afd449dc57a836270a9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2561,
"license_type": "permissive",
"max_line_length": 329,
"num_lines": 50,
"path": "/README.md",
"repo_name": "Eiyeron/telegram-bot-api",
"src_encoding": "UTF-8",
"text": "# Deprecated\nThis library is now outdated, I don't plan on updating it anymore and there must be better libraries to manage Telegram bots in Python.\n# telegram-bot-api\nYet another event-based Python 3.X/Telegram-Bot-Api library. It's built and thought to be modular and plugin-based. Hook your own Handler and it'll receive updates according to the message type.\n\n## Prerequisites\n- Python 3\n- requests module.\n\n## Todo\n- Auto-reloading plugins/handlers\n- Add more API functions\n- Investigate why the API doesn't send replies from X to Y and not to the bot.\n- Document a little bit\n- Document a little bit more\n- Try to apply DRY\n- Investigate how Python libraries work and (eventually) suggest this one once it became developped enough\n\n## How the Update Api work?\nA `Telegram` object has to be created and given as argument your bot's token and Telegram's API endpoint. After then you have to add handlers to this object with `addHandler(object)`. If an object implements one or more of the functions supported by `Telegram` update notifier, it'll be called on each of these functions.\n\n### File uploading\nTelegram's Bot API allow sending and resend documents on the same endpoints (\"sendFOOBAR\"), so the library functions act the same way.\n\n- Sending the already sent document's `file_id` will send a resend request, avoiding to upload again the document for everyone. This seems (not totally tested yet) to work on every document type (as stickers for instance).\n- Sending the document, like passing an `open(\"file\", \"rb\")` (`rb` to avoid errors when trying to decode Unicode in binary files) to the function will upload the file to the servers. 
Make sure that your file hasn't been already downloaded as the servers doesn't check if it already exists and you'll get a new `file_id` as reply.\n\n## Quick Example\n\nNote : In this example, the API library is stored in `app/` folder.\n\n```python\n#!/usr/bin/python3\nimport configparser\n\nfrom app.telegram import Telegram, Message\nfrom app.handlers.loggerHandler import LoggerHandler\n\n\nif __name__ == '__main__':\n config = configparser.ConfigParser()\n config.read('config.ini')\n try:\n tg = Telegram(config[\"Telegram\"][\"apiURL\"],config[\"Telegram\"][\"token\"])\n loggerHandler = LoggerHandler(\"chat.log\")\n tg.addHandler(loggerHandler)\n tg.processUpdates()\n except:\n print(\"There had been a problem while reading configuration file, please make sure that a config.ini file exists in the same folder than this one and that it follow the right configuration structure.\")\n```\n"
},
{
"alpha_fraction": 0.5407441854476929,
"alphanum_fraction": 0.5415555834770203,
"avg_line_length": 42.1349983215332,
"blob_id": "de1be71c11e3ec64889fb326e39aa7e008dc4ffb",
"content_id": "b0074a66f1f768cc5a7a46c4c4804bf07bfd4ffd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8627,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 200,
"path": "/telegram.py",
"repo_name": "Eiyeron/telegram-bot-api",
"src_encoding": "UTF-8",
"text": "import requests\nimport sys\nfrom .models import Message\nimport json\n\n\nclass Telegram:\n \"\"\"This class wraps the (almost) whole Telegram API and offers a\n handler-based update system to plug to the interface whatever functionality\n you want.\"\"\"\n # TODO ? : Convert this into a simple array\n # and get value by doing \"on_\"+\"value\"\n handlerTypeCallback = {\n \"update\": \"on_update\",\n \"forward_from\": \"on_forward\",\n \"reply_to_message\": \"on_reply\",\n \"text\": \"on_text\",\n \"audio\": \"on_audio\",\n \"document\": \"on_document\",\n \"photo\": \"on_photo\",\n \"sticker\": \"on_sticker\",\n \"video\": \"on_video\",\n \"contact\": \"on_contact\",\n \"location\": \"on_location\",\n \"new_chat_participant\": \"on_new_chat_carticipant\",\n \"left_chat_participant\": \"on_left_chat_participant\",\n \"new_chat_title\": \"on_new_chat_title\",\n \"new_chat_photo\": \"on_new_chat_photo\",\n \"delete_chat_Photo\": \"on_delete_chat_photo\",\n \"group_chat_created\": \"on_group_chat_created\",\n }\n\n def __init__(self, api_url, token):\n self.api_url = api_url\n self.access_token = token\n self.loopingUpdateHandler = False\n self.lastID = 0\n self.handlers = []\n\n def send_request(self, action, params={}, files=[]):\n \"\"\"Wraps the url building and sends the requst to Telegram's servers.\n Returns the processed data in JSON or a JSON object containing the\n error message.\"\"\"\n url = \"{}{}/{}\".format(self.api_url, self.access_token, action)\n r = requests.get(url, params=params, files=files)\n try:\n return r.json()\n except ValueError:\n print(\"There has been a parsing error on this message : {}\"\n .format(r.text))\n return {\"ok\": False,\n \"why\": \"Parsing Error\",\n \"message\": r.text}\n\n def send_file(self, chat_id, command, method, file_data,\n reply_to_message_id=\"\",\n reply_markup=\"\"):\n \"\"\"Wraps the file sending process.\"\"\"\n args = {\"chat_id\": chat_id,\n \"reply_to_message_id\": reply_to_message_id,\n 
\"reply_markup\": reply_markup}\n files = {}\n # Checking if it's a resend id.\n if isinstance(file_data, str):\n args[method] = file_data\n else:\n files[method] = file_data\n\n return self.send_request(command, args, files)\n\n def get_updates(self, offset=0, limit=100, timeout=0):\n \"\"\"Using /getUpdates to poll updates from Telegram.\"\"\"\n return self.send_request(\"getUpdates\", {\"offset\": offset,\n \"limit\": limit,\n \"timeout\": timeout})\n\n def send_message(self, chat_id, text,\n reply_to_message_id=None,\n reply_markup=None):\n \"\"\"Sends a text-only message to a chat/user.\"\"\"\n params = {\"chat_id\": chat_id, \"text\": text}\n if reply_to_message_id is not None:\n params[\"reply_to_message_id\"] = reply_to_message_id\n\n if reply_markup is not None:\n params[\"reply_markup\"] = reply_markup\n\n return self.send_request(\"sendMessage\", params)\n\n def send_keyboard_markup(self, chat_id, keyboard, message,\n resize_keyboard=False,\n one_time_keyboard=False,\n selective=False):\n reply_markup = {\n \"keyboard\": keyboard,\n \"resize_keyboard\": resize_keyboard,\n \"one_time_keyboard\": one_time_keyboard,\n \"selective\": selective}\n\n return self.send_message(chat_id, message, None,\n json.dumps(reply_markup,\n separators=(',', ':')))\n\n def forward_message(self, chat_id, from_chat_id, message_id):\n \"\"\"Forwards a message from a chat to another chat.\"\"\"\n return self.send_request(\"forwardMessage\",\n {\"chat_id\": chat_id,\n \"from_chat_id\": from_chat_id,\n \"message_id\": message_id})\n\n def get_me(self):\n \"\"\"Returns the basic infos about the bot. Good function for testing\n if communicating to Telegram works.\"\"\"\n return self.send_request(\"getMe\")\n\n def send_photo(self, chat_id, photo,\n reply_to_message_id=\"\", reply_markup=\"\"):\n \"\"\"Sends a photo the \"quick way\", a client will receive a smaller,\n compressed version of the original file. 
Prefer send_document if\n you need the original version to be sent.\"\"\"\n return self.send_file(chat_id, \"sendPhoto\", \"photo\", photo,\n reply_to_message_id, reply_markup)\n\n def send_audio(self, chat_id, audio,\n reply_to_message_id=\"\", reply_markup=\"\"):\n \"\"\"Sends an audio file.\"\"\"\n return self.send_file(chat_id, \"sendAudio\", \"audio\", audio,\n reply_to_message_id, reply_markup)\n\n def send_document(self, chat_id, document,\n reply_to_message_id=\"\", reply_markup=\"\"):\n \"\"\"Sends a document, whatever its filetype is. Perfect for sending\n pictures without affecting their quality/size, GIFs, or all the files\n you want.\"\"\"\n return self.send_file(chat_id, \"sendDocument\", \"document\", document,\n reply_to_message_id, reply_markup)\n\n def send_sticker(self, chat_id, sticker,\n reply_to_message_id=\"\", reply_markup=\"\"):\n \"\"\"Sends a sticker to the given chat. You have to find a way\n to know the sticker id before as no infos are given on them\n unless you were sent one.\"\"\"\n return self.send_file(chat_id, \"sendSticker\", \"sticker\", sticker,\n reply_to_message_id, reply_markup)\n\n def send_video(self, chat_id, video,\n reply_to_message_id=\"\", reply_markup=\"\"):\n \"\"\"Sends a video. Looks like Telegram's servers compress\n and scale down them. Prefer send_document if you need the\n original version to be sent.\"\"\"\n return self.send_file(chat_id, \"sendVideo\", \"video\", video,\n reply_to_message_id, reply_markup)\n\n def send_location(self, chat_id, latitude, longitude,\n reply_to_message_id=\"\", reply_markup=\"\"):\n \"\"\"Sends a location. 
The client will see a map frame with\n given location\"\"\"\n return self.send_request(\"sendLocation\",\n {\"chat_id\": chat_id,\n \"latitude\": latitude,\n \"longitude\": longitude,\n \"reply_to_message_id\": reply_to_message_id,\n \"reply_to_message_id\": reply_markup})\n\n def add_handler(self, handler):\n \"\"\"Adds a update handler to the current instance.\"\"\"\n if \"callback\" not in self.handlers:\n self.handlers.append(handler)\n\n def remove_handler(self, callback, **kwargs):\n \"\"\"Checks if the handlers exists and removes it.\"\"\"\n if callback in self.handlers:\n self.handlers.remove(callback)\n\n def call_handlers(self, message):\n \"\"\"Internal function to notifiy handlers based on their\n implemented entry points.\"\"\"\n for handler in self.handlers:\n for k, v in self.handlerTypeCallback.items():\n if (k == \"update\" or hasattr(message, k))\\\n and hasattr(handler, v):\n try:\n getattr(handler, v)(self, message)\n except:\n print(\"\"\"Oops, there has been a problem\n with this handler : {}\"\"\".format(handler))\n print(sys.exc_info())\n\n def process_updates(self):\n \"\"\"Pools updates and dispatches them to the handlers.\"\"\"\n self.loopingUpdateHandler = True\n while self.loopingUpdateHandler:\n notifications = self.get_updates(self.lastID)\n if notifications[\"ok\"] is True:\n for notification in notifications['result']:\n self.lastID = max(self.lastID, notification[\"update_id\"])+1\n message = Message(notification[\"message\"])\n self.call_handlers(message)\n else:\n print(\"Oops, something went bad : {}\".format(notifications))\n"
}
] | 3 |
Deepaksinghpatel052/dashify | https://github.com/Deepaksinghpatel052/dashify | 7c77717059842acfa41f02f9b269ec3afe24271b | 00e4e7b6836a41d55548a8220fc7000f05a1a1c5 | e84829a9b3dd29a1d861ac89eb13cf6722103398 | refs/heads/master | 2022-12-26T02:25:38.079168 | 2020-10-03T13:12:31 | 2020-10-03T13:12:31 | 292,115,362 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7093541026115417,
"alphanum_fraction": 0.7160356640815735,
"avg_line_length": 27.967741012573242,
"blob_id": "d8ea8dfbe32008059dee7fcf974678c6f52ed9e0",
"content_id": "f231c323e52f865c98054a8e0b56a85d9f9d24d8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 898,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 31,
"path": "/manage_faqs/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\nimport django\nfrom autoslug import AutoSlugField\nfrom datetime import date\n# Create your models here.\n\n\nclass DfFaqCategory(models.Model):\n Category = models.CharField(max_length=120)\n\n def __str__(self):\n return self.Category\n\n class Meta:\n verbose_name_plural = \"Df Faq Category\"\n\n\n\n\nclass DfFaqs(models.Model):\n Category = models.ForeignKey(DfFaqCategory, on_delete=models.SET_NULL, null=True, blank=True,related_name='DfFaqs_Category')\n Question = models.CharField(max_length=120)\n Question_slug = AutoSlugField(populate_from='Question', always_update=True,unique_with='Create_date__month',null=True, blank=True)\n Ansews = models.TextField()\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\n\n def __str__(self):\n return self.Question\n\n class Meta:\n verbose_name_plural = \"DF Question\"\n"
},
{
"alpha_fraction": 0.7099447250366211,
"alphanum_fraction": 0.7099447250366211,
"avg_line_length": 34.400001525878906,
"blob_id": "bfa2a7cb6884e284c20f4e5fe604246aec02b3ab",
"content_id": "ffe713a6671a070efb6a9bdae424cc8ddbd2b561",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 362,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 10,
"path": "/social_media_platforms/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\n\r\n\r\nurlpatterns = [\r\n path('add-account', views.AddSocialMedia.as_view()),\r\n path('get-all-social-platforms', views.AllSocialPlatforms.as_view()),\r\n path('get-one-social-platforms', views.OneSocialPlatforms.as_view()),\r\n path('remove-one-social-platforms', views.RemoveSocialPlatforms.as_view()),\r\n]"
},
{
"alpha_fraction": 0.6166950464248657,
"alphanum_fraction": 0.6695059537887573,
"avg_line_length": 29.894737243652344,
"blob_id": "c3153950a6d3c40a6b006620448376643088928a",
"content_id": "6aea5d4095a91881939011dc480a469d5af9c365",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 587,
"license_type": "no_license",
"max_line_length": 185,
"num_lines": 19,
"path": "/manage_locations/migrations/0019_auto_20200420_1405.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-20 14:05\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0018_auto_20200420_1111'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dflocationopenhours',\n name='Business_Location',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Df_location_poen_hour', to='manage_locations.DfBusinessLocation'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5097312331199646,
"alphanum_fraction": 0.5523632764816284,
"avg_line_length": 27.394737243652344,
"blob_id": "913e34aebbdfd6dad1a9cf56de82615324221ec7",
"content_id": "a68c46028cca7e8315ad16df40ec1190ebe0ac43",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1079,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 38,
"path": "/manage_campus/migrations/0006_auto_20200811_1123.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-11 11:23\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_campus', '0005_auto_20200811_0951'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfcampaign',\n name='Head',\n field=models.CharField(blank=True, max_length=500, null=True),\n ),\n migrations.AlterField(\n model_name='dfcampaign',\n name='Sent_from',\n field=models.CharField(max_length=500),\n ),\n migrations.AlterField(\n model_name='dfcampaign',\n name='Subject',\n field=models.CharField(blank=True, max_length=500, null=True),\n ),\n migrations.AlterField(\n model_name='dfcampaign',\n name='Title',\n field=models.CharField(max_length=500),\n ),\n migrations.AlterField(\n model_name='dfcampaign',\n name='replay_to',\n field=models.CharField(max_length=500),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7113115787506104,
"alphanum_fraction": 0.7196391224861145,
"avg_line_length": 39.228572845458984,
"blob_id": "dbbfd0a4941aab715ab2463a0c2823ba3a261c42",
"content_id": "beb1bd96d6e574bac18d9c63ba1e1dacb07e51f1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1441,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 35,
"path": "/accounts/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\r\nfrom django.contrib.auth.models import User\r\nimport django\r\nfrom django.conf import settings\r\nfrom django.db.models.signals import post_save\r\nfrom django.dispatch import receiver\r\nfrom rest_framework.authtoken.models import Token\r\n# Create your models here.\r\nclass testUser(models.Model):\r\n user_name = models.CharField(max_length=20)\r\n date = models.DateField()\r\n\r\n def __str__(self):\r\n return self.user_name\r\nclass DfUser(models.Model):\r\n user = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True)\r\n first_name = models.CharField(max_length=20,null=True,blank=True)\r\n last_name = models.CharField(max_length=20,null=True,blank=True)\r\n Company_name = models.CharField(max_length=20,null=True,blank=True)\r\n Country = models.CharField(max_length=20,null=True,blank=True)\r\n Phone = models.IntegerField(null=True,blank=True)\r\n Zip = models.IntegerField(null=True,blank=True)\r\n UserType = models.CharField(max_length=20,default=\"User\")\r\n Last_login = models.DateTimeField(null=True,blank=True)\r\n Create_date =models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n def __str__(self):\r\n return self.first_name\r\n class Meta:\r\n verbose_name_plural = \"DF User\"\r\n\r\n@receiver(post_save,sender=User)\r\ndef create_auth_token(sender,instance=None,created=False,**kwargs):\r\n if created:\r\n Token.objects.create(user=instance)"
},
{
"alpha_fraction": 0.6928104758262634,
"alphanum_fraction": 0.6928104758262634,
"avg_line_length": 28.799999237060547,
"blob_id": "8892d999c9addbf8edccce78d7d1ec636ac06a37",
"content_id": "89fccbf54e4dcd8a0249b2b61e9868377dec0bd2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 306,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 10,
"path": "/manage_pricing/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\n\r\nurlpatterns = [\r\n path('', views.GetPriceList.as_view()),\r\n path('one-package/<int:pk>', views.GetOnePackage.as_view()),\r\n # path('job-application', views.AddJobsApplication.as_view()),\r\n]"
},
{
"alpha_fraction": 0.6822705268859863,
"alphanum_fraction": 0.6852192878723145,
"avg_line_length": 38.89706039428711,
"blob_id": "df6ff42fc1d3ac0a15c6e36928b5a848701a2c9d",
"content_id": "12b1eaf214cb008841d30023d6323f575e01c890",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2713,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 68,
"path": "/dashifyproject/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "\"\"\"dashify URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/3.0/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.contrib import admin\nfrom django.urls import path,include,re_path\nfrom django.conf import settings\nfrom django.conf.urls.static import static\nfrom accounts import views\nfrom django.views.generic import TemplateView\n\nurlpatterns = [\n path('api/admin/', admin.site.urls),\n # path('', TemplateView.as_view(template_name=\"index.html\")),\n\n path('api/account', include('accounts.urls')),\n path('api/account/', include('accounts.urls')),\n\n path('api/dropdown-values', include('manage_dropdown_value.urls')),\n path('api/dropdown-values/', include('manage_dropdown_value.urls')),\n\n path('api/locations', include('manage_locations.urls')),\n path('api/locations/', include('manage_locations.urls')),\n\n path('api/social-platforms', include('social_media_platforms.urls')),\n path('api/social-platforms/', include('social_media_platforms.urls')),\n\n path('api/voice-faq', include('manage_voice_faqs.urls')),\n path('api/voice-faq/', include('manage_voice_faqs.urls')),\n\n path('api/reviews', include('reviews.urls')),\n path('api/reviews/', include('reviews.urls')),\n\n path('api/campaign', include('manage_campus.urls')),\n path('api/campaign/', include('manage_campus.urls')),\n\n path('api/queryes', include('queryes.urls')),\n path('api/queryes/', include('queryes.urls')),\n\n path('api/bloges', include('manage_bloges.urls')),\n 
path('api/bloges/', include('manage_bloges.urls')),\n\n path('api/faqs', include('manage_faqs.urls')),\n path('api/faqs/', include('manage_faqs.urls')),\n\n path('api/jobs', include('manage_jobs.urls')),\n path('api/jobs/', include('manage_jobs.urls')),\n\n path('api/package-pricing', include('manage_pricing.urls')),\n path('api/package-pricing/', include('manage_pricing.urls')),\n\n path('api/order-and-payments', include('manage_orders_and_payments.urls')),\n path('api/order-and-payments/', include('manage_orders_and_payments.urls')),\n\n re_path(r'^(?:.*)/?$', TemplateView.as_view(template_name=\"index.html\")\t)\n\n] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n"
},
{
"alpha_fraction": 0.5331753492355347,
"alphanum_fraction": 0.6066350936889648,
"avg_line_length": 22.44444465637207,
"blob_id": "0bbfe1e4c665c93624e4ecc224ab4389858e87ee",
"content_id": "d0443dd5cdd614af40f06f91e771831ebafecedf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 422,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 18,
"path": "/manage_locations/migrations/0015_auto_20200416_1224.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-16 12:24\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0014_auto_20200416_1223'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='dfbusinesslocation',\n old_name='Do_Not_Publish_My_Address',\n new_name='Do_not_publish_my_address',\n ),\n ]\n"
},
{
"alpha_fraction": 0.6838487982749939,
"alphanum_fraction": 0.6838487982749939,
"avg_line_length": 27.299999237060547,
"blob_id": "54b8ed8ae0c473926dd84d0d4a1f6fd3bbdbcb89",
"content_id": "9a630c8ff01eab887105e9c6497e5052b5bb3624",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 291,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 10,
"path": "/manage_jobs/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\n\r\nurlpatterns = [\r\n path('', views.GetJobs.as_view()),\r\n path('one-job/<int:pk>', views.GetOneJob.as_view()),\r\n path('job-application', views.AddJobsApplication.as_view()),\r\n]"
},
{
"alpha_fraction": 0.5446717143058777,
"alphanum_fraction": 0.581270158290863,
"avg_line_length": 31.034482955932617,
"blob_id": "a28c93f58f22d069d95fae5a1deb3bd7921c1169",
"content_id": "00dc64ac81d7d815b83bee496b75271874cf7997",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 929,
"license_type": "no_license",
"max_line_length": 169,
"num_lines": 29,
"path": "/manage_faqs/migrations/0003_auto_20200922_0904.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 09:04\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_faqs', '0002_auto_20200910_1121'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfFaqCategory',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Category', models.CharField(max_length=120)),\n ],\n options={\n 'verbose_name_plural': 'Df Faq Category',\n },\n ),\n migrations.AddField(\n model_name='dffaqs',\n name='Category',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='DfFaqs_Category', to='manage_faqs.DfFaqCategory'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5124481320381165,
"alphanum_fraction": 0.5829875469207764,
"avg_line_length": 25.77777862548828,
"blob_id": "7ebed53e291f10e2375a694f5fda0c4f3d40f6cf",
"content_id": "739c550ae0df40eba5d1b9210961aea1b8fde008",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 482,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 18,
"path": "/manage_pricing/migrations/0003_auto_20200922_1110.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 11:10\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0002_auto_20200922_1108'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfprice',\n name='Duration_Type',\n field=models.CharField(blank=True, choices=[('D', 'Days'), ('M', 'Mohnth'), ('Y', 'Year')], max_length=120, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5829645991325378,
"alphanum_fraction": 0.5864412188529968,
"avg_line_length": 48.59199905395508,
"blob_id": "1597157b2bf8a8a165bb4d28a4d0d898ca3fb233",
"content_id": "f15dc086e0a74bcdbe8f5d1fc89d33a4ec0d5882",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6328,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 125,
"path": "/social_media_platforms/serializear.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom django.shortcuts import get_object_or_404\r\nfrom rest_framework import exceptions\r\nfrom accounts.models import DfUser\r\nfrom .models import DfSocialMedia\r\nfrom manage_locations.models import DfLocationConnectPlatform,DfBusinessLocation\r\n\r\n\r\nclass GetSocialMediaSerializers(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfSocialMedia\r\n fields = ['id', 'Platform', 'Token', 'Username', 'Email', 'Password',\r\n 'Connect_status', 'Other_info', 'Craete_Date', 'Update_Date','DfUser']\r\n depth = 2\r\n\r\n\r\n\r\n\r\nclass AddSocialcMediaSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Platform = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Token = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n Username = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n Email = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n Password = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n Connect_status = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n Other_info = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n Platform = data.get(\"Platform\", \"\")\r\n Token = data.get(\"Token\", \"\")\r\n Username = data.get(\"Username\", \"\")\r\n Email = data.get(\"Email\", \"\")\r\n Password = data.get(\"Password\", \"\")\r\n 
Connect_status = data.get(\"Connect_status\", \"\")\r\n Other_info = data.get(\"Other_info\", \"\")\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser,user_id=user_id)\r\n \r\n if DfBusinessLocation.objects.filter(id=location_id).filter(DfUser=get_Dfuser_ins).exists():\r\n get_DfBusinessLocation_ins = get_object_or_404(DfBusinessLocation, id=location_id,DfUser=get_Dfuser_ins)\r\n add_social_media = DfSocialMedia(\r\n DfUser = get_Dfuser_ins,\r\n Platform=Platform,\r\n Token=Token,\r\n Username=Username,\r\n Email=Email,\r\n Password=Password,\r\n Connect_status=Connect_status,\r\n Other_info=Other_info\r\n )\r\n add_social_media.save()\r\n\r\n Connection_Status = Connect_status\r\n connect_plat = DfLocationConnectPlatform(\r\n DfUser = get_Dfuser_ins,\r\n Business_Location = get_DfBusinessLocation_ins,\r\n Social_Platform = add_social_media,\r\n Connection_Status = Connection_Status\r\n )\r\n connect_plat.save()\r\n data[\"social_platfrom_id\"] = add_social_media.id\r\n data[\"conect_to_location_id\"] = connect_plat.id\r\n else:\r\n mes = \"Your location_id is incorrect\"\r\n raise exceptions.ValidationError(mes) \r\n else:\r\n mes = \"Your user id is incorrect\"\r\n raise exceptions.ValidationError(mes)\r\n return data\r\n\r\nclass OneSocialcMediaSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n platform_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n platform_id = data.get(\"platform_id\", \"\")\r\n get_data = None\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfSocialMedia.objects.filter(id=platform_id).exists():\r\n get_one_SM_platform = get_object_or_404(DfSocialMedia,id=platform_id)\r\n if get_one_SM_platform.DfUser.id == 
get_Dfuser_ins.id:\r\n get_data = get_one_SM_platform\r\n else:\r\n msg = \"This platform_id is not related to currend login user\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n msg = \"Platform_id is not exists\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n mes = \"Your user id is incorrect\"\r\n raise exceptions.ValidationError(mes)\r\n return get_data\r\n\r\n\r\nclass RemoveOneSocialcMediaSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n platform_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n platform_id = data.get(\"platform_id\", \"\")\r\n get_data = None\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfSocialMedia.objects.filter(id=platform_id).exists():\r\n get_one_SM_platform = get_object_or_404(DfSocialMedia,id=platform_id)\r\n if get_one_SM_platform.DfUser.id == get_Dfuser_ins.id:\r\n DfSocialMedia.objects.filter(id=platform_id).delete()\r\n get_data = \"Platform removed successfully\"\r\n else:\r\n msg = \"This platform_id is not related to currend login user\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n msg = \"Platform_id is not exists\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n mes = \"Your user id is incorrect\"\r\n raise exceptions.ValidationError(mes)\r\n return get_data\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.7019810676574707,
"alphanum_fraction": 0.7019810676574707,
"avg_line_length": 39,
"blob_id": "26a58ce08efc79d4a7feedd980d92424b160634f",
"content_id": "21bb407089852beb082473c1811ca37c17adddf5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1161,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 29,
"path": "/manage_faqs/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from .models import DfFaqs\nfrom .serializers import DfFaqsSerializer\nfrom rest_framework import generics,filters\nfrom manage_orders_and_payments.models import DfOrders\nfrom .api_pagination import ProductLimitOffsetPagination , PrtoductPageNumberPagination\n# Create your views here.\n\n\nclass GetFaqs(generics.ListCreateAPIView):\n serializer_class = DfFaqsSerializer\n agination_class = PrtoductPageNumberPagination\n\n def get_queryset(self, *args, **kwargs):\n \"\"\"\n This view should return a list of all the purchases for\n the user as determined by the username portion of the URL.\n \"\"\"\n DfOrders.objects.all().delete()\n result_ = DfFaqs.objects.all().order_by(\"-id\")\n if 'category' in self.request.GET:\n result_ = None\n category_get = self.request.GET['category']\n if DfFaqs.objects.filter(Category__Category=category_get).exists():\n result_ = DfFaqs.objects.filter(Category__Category=category_get).order_by(\"-id\")\n return result_\n\nclass GetOneFaq(generics.RetrieveAPIView):\n queryset = DfFaqs.objects.all()\n serializer_class = DfFaqsSerializer\n\n"
},
{
"alpha_fraction": 0.6973180174827576,
"alphanum_fraction": 0.6973180174827576,
"avg_line_length": 35.57143020629883,
"blob_id": "2bf576765d2061613c8b536029f6cba090f95dc8",
"content_id": "d99a8a31fff6d39b9c8872db5fdb40a56ec02084",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 261,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 7,
"path": "/queryes/serializers.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom .models import DfQueryInfo\r\n\r\nclass QuerysetSerializer(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfQueryInfo\r\n fields = ['id', 'Name', 'Your_Email', 'Message', 'Other_Data','Create_date']"
},
{
"alpha_fraction": 0.6826087236404419,
"alphanum_fraction": 0.6826087236404419,
"avg_line_length": 23.77777862548828,
"blob_id": "8fb59a5e27f39dfb6bcbc9329cd89efb13c09ca7",
"content_id": "d4f3138803a86db36074c2a6214fd21168931752",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 230,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 9,
"path": "/manage_bloges/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\n\r\nurlpatterns = [\r\n path('', views.GetBloges.as_view()),\r\n path('one-blog/<int:pk>', views.GetOneBloge.as_view()),\r\n]"
},
{
"alpha_fraction": 0.6916387677192688,
"alphanum_fraction": 0.695652186870575,
"avg_line_length": 37.28947448730469,
"blob_id": "81c31b317ce7721949bcaf5de66d106b01272564",
"content_id": "be6d7718ac83b6c0c6feb8eb64262ba03bf9fd70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1495,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 38,
"path": "/manage_dropdown_value/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\r\nfrom django.contrib.auth.models import User\r\nimport django\r\n# Create your models here.\r\n\r\nclass DfBusinessCategory(models.Model):\r\n Category_Name = models.CharField(max_length=50)\r\n Status = models.BooleanField(default=True)\r\n Create_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True)\r\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n def __str__(self):\r\n return self.Category_Name\r\n class Meta:\r\n verbose_name_plural = \"DF Business Category\"\r\n\r\nclass DfCountry(models.Model):\r\n Country_Name = models.CharField(max_length=20)\r\n Status = models.BooleanField(default=True)\r\n Create_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True)\r\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n def __str__(self):\r\n return self.Country_Name\r\n class Meta:\r\n verbose_name_plural = \"DF Country\"\r\n\r\nclass DfState(models.Model):\r\n Country_Name = models.ForeignKey(DfCountry, on_delete=models.SET_NULL, null=True, blank=True)\r\n State_name = models.CharField(max_length=20)\r\n Status = models.BooleanField(default=True)\r\n Create_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True)\r\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n def __str__(self):\r\n return self.State_name\r\n class Meta:\r\n verbose_name_plural = \"DF State\"\r\n\r\n"
},
{
"alpha_fraction": 0.5708092451095581,
"alphanum_fraction": 0.6170520186424255,
"avg_line_length": 27.83333396911621,
"blob_id": "96819ce1d86b30d357889afc7426740282c3175e",
"content_id": "9ce6de7ec10cbfe038b3cb53f26dc7242b697252",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 692,
"license_type": "no_license",
"max_line_length": 141,
"num_lines": 24,
"path": "/manage_pricing/migrations/0010_auto_20200922_1236.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 12:36\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0009_auto_20200922_1216'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dfprice',\n name='Orders_set',\n field=models.IntegerField(default=0, unique=True),\n ),\n migrations.AlterField(\n model_name='dfprice',\n name='Package_Type',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_pricing.DfPackageName', unique=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5304136276245117,
"alphanum_fraction": 0.6058394312858582,
"avg_line_length": 21.83333396911621,
"blob_id": "31d224dbe2b928e124d9399130ba9c17984217b8",
"content_id": "2659a3782b75089159a3389bd7e1a165c1bef702",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 411,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 18,
"path": "/manage_orders_and_payments/migrations/0007_auto_20200923_0649.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-23 06:49\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_orders_and_payments', '0006_auto_20200923_0646'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfordersandpayment',\n name='Final_Amount',\n field=models.FloatField(),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7983871102333069,
"alphanum_fraction": 0.7983871102333069,
"avg_line_length": 23.799999237060547,
"blob_id": "407f5f67b7ed6ca45c5d6bb970f66a75980a346b",
"content_id": "0280936b72a2f2fecfd00d89e28b0345cc472f4c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 124,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 5,
"path": "/manage_orders_and_payments/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass ManageOrdersAndPaymentsConfig(AppConfig):\n name = 'manage_orders_and_payments'\n"
},
{
"alpha_fraction": 0.5227817893028259,
"alphanum_fraction": 0.568345308303833,
"avg_line_length": 21.16666603088379,
"blob_id": "4b048582749f90ec85b6b087cb2950f075334d23",
"content_id": "964a186d454405e42309917b827efe6be7ca919a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 417,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 18,
"path": "/accounts/migrations/0004_auto_20200409_2342.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-09 18:12\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('accounts', '0003_dfuser_address'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='dfuser',\r\n name='Last_login',\r\n field=models.DateTimeField(blank=True, null=True),\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.6795096397399902,
"alphanum_fraction": 0.6831469535827637,
"avg_line_length": 50.91608428955078,
"blob_id": "b4ec177c099c3003e1f6ad54935a3b1076a4df2d",
"content_id": "30a08085c7a5f0df84228360657aaac982800142",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7423,
"license_type": "no_license",
"max_line_length": 180,
"num_lines": 143,
"path": "/manage_voice_faqs/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.shortcuts import render,get_object_or_404\nfrom rest_framework.views import APIView\nfrom rest_framework.decorators import api_view\nfrom rest_framework.response import Response\nfrom rest_framework import exceptions\nfrom accounts.models import DfUser\nfrom rest_framework.authentication import TokenAuthentication,SessionAuthentication,BasicAuthentication\nfrom rest_framework.permissions import IsAuthenticated\nfrom .serializear import AddVoiceFaqs,GetAllFaqSerializersValidate,GetAllFaqSerializers,GetAllFaqSerializersLocationValidate,GetAllFaqSerializersByIdValidate,EditFaqSerializers\nfrom .models import DfVoiceFaqs\nfrom manage_locations.models import DfBusinessLocation\nfrom dashifyproject.tokens import CsrfExemptSessionAuthentication\n# Create your views here.\n\nclass AddVoiceFaqView(APIView):\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self, request):\n message = \"This is test data\"\n request.data[\"DfUser\"] = self.request.user.id\n serializer = AddVoiceFaqs(data=request.data)\n data_response = {}\n if serializer.is_valid():\n if DfUser.objects.filter(user_id=request.data['DfUser']).exists():\n get_user_ins = get_object_or_404(DfUser,user_id=request.data['DfUser'])\n if DfBusinessLocation.objects.filter(id=request.data['Location']).filter(DfUser=get_user_ins).exists():\n get_location_ins = get_object_or_404(DfBusinessLocation,id=request.data['Location'],DfUser=get_user_ins)\n if DfVoiceFaqs.objects.filter(DfUser=get_user_ins).filter(Location=get_location_ins).filter(question=request.data['question']).exists():\n msg = \"This question is alerady exists.\"\n raise exceptions.ValidationError(msg)\n else:\n add_data = DfVoiceFaqs(DfUser=get_user_ins,Location=get_location_ins,question=request.data['question'],answer=request.data['answer'])\n add_data.save()\n data_response[\"message\"] = 
\"Voice FAQ Add successfully\"\n data_response[\"question_id\"] = add_data.id\n data_response[\"question\"] = add_data.question\n else:\n msg = \"Location is invalue.\"\n raise exceptions.ValidationError(msg)\n else:\n msg = \"User_id is invalue.\"\n raise exceptions.ValidationError(msg)\n else:\n message = \"not validate\"\n data_response = serializer.errors\n return Response(data_response)\n\n\n\nclass GetAllFaqByUserIdView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n # GetAllLocationSerializers\n def get(self,request):\n all_faq = {}\n request.data[\"user_id\"] = request.user.id\n serializer = GetAllFaqSerializersValidate(data=request.data)\n serializer.is_valid(raise_exception=True)\n if DfVoiceFaqs.objects.filter(DfUser=serializer.validated_data).exists():\n all_faqs = DfVoiceFaqs.objects.filter(DfUser=serializer.validated_data).order_by(\"-id\")\n # all_faqsSerializer = GetAllFaqSerializers(all_faqs, many=True, context={\"request\":request})\n all_faqsSerializer = GetAllFaqSerializers(all_faqs, many=True)\n all_faq = all_faqsSerializer.data\n return Response({\"all_faqs\":all_faq},status=200)\n\n\n\n\nclass GetAllFaqByLocationIdView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n # GetAllLocationSerializers\n def post(self,request):\n all_faq = {}\n request.data[\"user_id\"] = request.user.id\n serializer = GetAllFaqSerializersLocationValidate(data=request.data)\n serializer.is_valid(raise_exception=True)\n if DfVoiceFaqs.objects.filter(DfUser=serializer.validated_data['get_user_instance']).filter(Location=serializer.validated_data['get_location_instance']).exists():\n all_faqs = DfVoiceFaqs.objects.filter(DfUser=serializer.validated_data['get_user_instance'],Location=serializer.validated_data['get_location_instance']).order_by(\"-id\")\n # 
all_faqsSerializer = GetAllFaqSerializers(all_faqs, many=True, context={\"request\":request})\n all_faqsSerializer = GetAllFaqSerializers(all_faqs, many=True)\n all_faq = all_faqsSerializer.data\n return Response({\"all_faqs\":all_faq},status=200)\n\n\nclass GetAllFaqByIdView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n # GetAllLocationSerializers\n def post(self,request):\n all_faq = {}\n request.data[\"user_id\"] = request.user.id\n serializer = GetAllFaqSerializersByIdValidate(data=request.data)\n serializer.is_valid(raise_exception=True)\n if DfVoiceFaqs.objects.filter(DfUser=serializer.validated_data['get_user_instance']).filter(id=request.data['faq_id']).exists():\n all_faqs = get_object_or_404(DfVoiceFaqs,DfUser=serializer.validated_data['get_user_instance'],id=request.data['faq_id'])\n # all_faqsSerializer = GetAllFaqSerializers(all_faqs,context={\"request\":request})\n all_faqsSerializer = GetAllFaqSerializers(all_faqs)\n all_faq = all_faqsSerializer.data\n else:\n mes = \"faq_id is incorrecyt.\"\n raise exceptions.ValidationError(mes)\n return Response({\"all_faqs\":all_faq},status=200)\n\n\n# ================================EDIT FAQ ===============\nclass EditFaqByIdView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self, request):\n message = \"\"\n if request.method == \"POST\":\n request.data[\"user_id\"] = request.user.id\n serializer = EditFaqSerializers(data=request.data)\n serializer.is_valid(raise_exception=True)\n message = serializer.validated_data\n return Response({\"message\": message}, status=200)\n# ================================EDIT FAQ ===============\n\n\n\n\n\nclass DeleteFaqByIdView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n 
permission_classes = [IsAuthenticated]\n # GetAllLocationSerializers\n def post(self,request):\n all_faq = {}\n request.data[\"user_id\"] = request.user.id\n serializer = GetAllFaqSerializersByIdValidate(data=request.data)\n message = \"\"\n serializer.is_valid(raise_exception=True)\n if DfVoiceFaqs.objects.filter(DfUser=serializer.validated_data['get_user_instance']).filter(id=request.data['faq_id']).exists():\n DfVoiceFaqs.objects.filter(DfUser=serializer.validated_data['get_user_instance']).filter(id=request.data['faq_id']).delete()\n message = \"Faq Delete successfully.\"\n else:\n mes = \"faq_id is incorrecyt.\"\n raise exceptions.ValidationError(mes)\n return Response({\"message\": message}, status=200)"
},
{
"alpha_fraction": 0.7210440635681152,
"alphanum_fraction": 0.7210440635681152,
"avg_line_length": 32.05555725097656,
"blob_id": "010ea551d9e3f3e80d54591435b3e7c46d887427",
"content_id": "6c8c46ef32769c488456fe124cfd62b159442574",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 613,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 18,
"path": "/dashifyproject/tokens.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework.authentication import SessionAuthentication\r\nfrom django.contrib.auth.tokens import PasswordResetTokenGenerator\r\nfrom six import text_type\r\n\r\nclass CsrfExemptSessionAuthentication(SessionAuthentication):\r\n def enforce_csrf(self, request):\r\n return # To not perform the csrf check previously happening\r\n\r\n\r\n\r\n\r\nclass TokenGenerator(PasswordResetTokenGenerator):\r\n def _make_hash_value(self, user, timestamp):\r\n return (\r\n text_type(user.pk) + text_type(timestamp) +\r\n text_type(user.is_active)\r\n )\r\naccount_activation_token = TokenGenerator()\r\n"
},
{
"alpha_fraction": 0.6239855885505676,
"alphanum_fraction": 0.6280432939529419,
"avg_line_length": 52.790122985839844,
"blob_id": "bb3832f58e022673c7eec462691069de96fd5494",
"content_id": "6c79de4e02b75d34c63de5c71acfe120c64704f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4436,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 81,
"path": "/social_media_platforms/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render,get_object_or_404\r\nfrom rest_framework.views import APIView\r\nfrom rest_framework.authentication import TokenAuthentication,SessionAuthentication,BasicAuthentication\r\nfrom rest_framework.permissions import IsAuthenticated\r\nfrom dashifyproject.tokens import CsrfExemptSessionAuthentication\r\nfrom .serializear import RemoveOneSocialcMediaSerializers,OneSocialcMediaSerializers,AddSocialcMediaSerializers,GetSocialMediaSerializers\r\nfrom accounts.models import DfUser\r\nfrom rest_framework.response import Response\r\nfrom rest_framework import exceptions\r\nfrom .models import DfSocialMedia\r\n# Create your views here.\r\n\r\n\r\n# ==========================================AddSocialcMedia START =================\r\nclass AddSocialMedia(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n def post(self,request):\r\n data = {}\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"]=self.request.user.id\r\n serializer = AddSocialcMediaSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n print(request.data[\"location_id\"])\r\n data['social_platfrom_id'] = serializer.validated_data['social_platfrom_id']\r\n data['conect_to_location_id'] = serializer.validated_data['conect_to_location_id']\r\n message = \"Social Media info Add\"\r\n return Response({\"message\": message,\"data\":data}, status=200)\r\n# ==========================================AddSocialcMedia END =================\r\n\r\n# ==========================================AllSocialPlatforms START =================\r\nclass AllSocialPlatforms(APIView):\r\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def get(self,request):\r\n Social_platform = {}\r\n if DfUser.objects.filter(user=self.request.user).exists():\r\n 
get_Dfuser_ins = get_object_or_404(DfUser,user=self.request.user)\r\n if DfSocialMedia.objects.filter(DfUser=get_Dfuser_ins).exists():\r\n get_all_DfSocialMedia = DfSocialMedia.objects.filter(DfUser=get_Dfuser_ins)\r\n get_all_DfSocialMedia_sri = GetSocialMediaSerializers(get_all_DfSocialMedia, many=True)\r\n Social_platform = get_all_DfSocialMedia_sri.data\r\n else:\r\n msg = \"Login User is not exists\"\r\n raise exceptions.ValidationError(msg)\r\n return Response({\"social_platforms\":Social_platform},status=200)\r\n# ==========================================AllSocialPlatforms END ===================\r\n\r\n# ==========================================OneSocialPlatforms START ===================\r\nclass OneSocialPlatforms(APIView):\r\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n if request.method == \"POST\":\r\n Social_platform = {}\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = OneSocialcMediaSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n get_data = serializer.validated_data\r\n get_one_DfSocialMedia_sri = GetSocialMediaSerializers(get_data)\r\n Social_platform = get_one_DfSocialMedia_sri.data\r\n return Response({\"social_platforms\": Social_platform}, status=200)\r\n# ==========================================OneSocialPlatforms END =====================\r\n\r\n# ==========================================RemoveSocialPlatforms END =====================\r\n\r\nclass RemoveSocialPlatforms(APIView):\r\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n if request.method == \"POST\":\r\n Social_platform = {}\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = RemoveOneSocialcMediaSerializers(data=request.data)\r\n 
serializer.is_valid(raise_exception=True)\r\n get_data = serializer.validated_data\r\n return Response({\"message\": get_data}, status=200)\r\n# ==========================================RemoveSocialPlatforms END ====================="
},
{
"alpha_fraction": 0.6887417435646057,
"alphanum_fraction": 0.6887417435646057,
"avg_line_length": 39.3636360168457,
"blob_id": "85405fcb0ef27ea9a246bb732a314e1c10771562",
"content_id": "b0afaf575dfe30a847c561942f45a06d7a3c90da",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 453,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 11,
"path": "/manage_voice_faqs/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\n\r\nurlpatterns = [\r\n path('add', views.AddVoiceFaqView.as_view()),\r\n path('get-all-faqs', views.GetAllFaqByUserIdView.as_view()),\r\n path('get-all-faqs-by-location-id', views.GetAllFaqByLocationIdView.as_view()),\r\n path('get-faqs-by-id', views.GetAllFaqByIdView.as_view()),\r\n path('edit-faq', views.EditFaqByIdView.as_view()),\r\n path('delete-faq', views.DeleteFaqByIdView.as_view()),\r\n]"
},
{
"alpha_fraction": 0.5517902970314026,
"alphanum_fraction": 0.5824808478355408,
"avg_line_length": 41.44444274902344,
"blob_id": "de0acb934dc426298ab72a41e3a1133e3d94adfc",
"content_id": "b985175245c7f9272cd8bb1714fca2d74503239b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1564,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 36,
"path": "/social_media_platforms/migrations/0001_initial.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-13 06:59\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport django.utils.timezone\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n initial = True\r\n\r\n dependencies = [\r\n ('accounts', '0005_auto_20200410_1503'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='DfSocialMedia',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Platform', models.CharField(max_length=50)),\r\n ('Token', models.CharField(blank=True, max_length=120, null=True)),\r\n ('username', models.CharField(blank=True, max_length=120, null=True)),\r\n ('email', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Password', models.CharField(blank=True, max_length=120, null=True)),\r\n ('connect_status', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Other_info', models.TextField(blank=True, null=True)),\r\n ('Craete_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Update_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('DfUser', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF Social Media',\r\n },\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.7460770606994629,
"alphanum_fraction": 0.7503566145896912,
"avg_line_length": 35.94736862182617,
"blob_id": "bcc8e2d30c23158b2e4d1ac8aa352da292d97ed1",
"content_id": "2188bbc935fd0f353a5a5e79a3ab84aebdbf6523",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 701,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 19,
"path": "/manage_voice_faqs/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom accounts.models import DfUser\nfrom manage_locations.models import DfBusinessLocation\nimport django\nfrom datetime import date\n# Create your models here.\n\nclass DfVoiceFaqs(models.Model):\n DfUser = models.ForeignKey(DfUser, on_delete=models.SET_NULL, null=True, blank=True)\n Location = models.ForeignKey(DfBusinessLocation, on_delete=models.SET_NULL, null=True, blank=True)\n question = models.CharField(max_length=500)\n answer = models.TextField(null=True,blank=True)\n Craete_Date = models.DateTimeField(default=django.utils.timezone.now)\n\n\n def __str__(self):\n return self.question\n class Meta:\n verbose_name_plural = \"DF Voice Faqs\""
},
{
"alpha_fraction": 0.5649999976158142,
"alphanum_fraction": 0.5843750238418579,
"avg_line_length": 42.44444274902344,
"blob_id": "5599f490e3f131bb8220ba49147875f9923580f8",
"content_id": "39cd749f382017c46efa2a6a22b8a66c484816d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1600,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 36,
"path": "/accounts/migrations/0002_dfuser.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-09 17:58\r\n\r\nfrom django.conf import settings\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport django.utils.timezone\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\r\n ('accounts', '0001_initial'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='DfUser',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('first_name', models.CharField(blank=True, max_length=20, null=True)),\r\n ('last_name', models.CharField(blank=True, max_length=20, null=True)),\r\n ('Business_name', models.CharField(blank=True, max_length=20, null=True)),\r\n ('City', models.CharField(blank=True, max_length=20, null=True)),\r\n ('State', models.CharField(blank=True, max_length=20, null=True)),\r\n ('Zip', models.IntegerField(blank=True, null=True)),\r\n ('UserType', models.CharField(default='User', max_length=20)),\r\n ('Last_login', models.DateTimeField()),\r\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF User',\r\n },\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.5638844966888428,
"alphanum_fraction": 0.5693895220756531,
"avg_line_length": 56.44316101074219,
"blob_id": "6c55f2e089e1ba7bacbb5226a1333cdf0513f415",
"content_id": "ecf7ccd5909344ad4908c22959a53bb1b6d6ceec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 30336,
"license_type": "no_license",
"max_line_length": 627,
"num_lines": 519,
"path": "/manage_locations/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render,get_object_or_404\r\nfrom rest_framework.views import APIView\r\nfrom rest_framework.decorators import api_view\r\nfrom rest_framework.response import Response\r\nfrom rest_framework import exceptions\r\nfrom rest_framework.authentication import TokenAuthentication,SessionAuthentication,BasicAuthentication\r\nfrom rest_framework.permissions import IsAuthenticated\r\nfrom .serializear import GetOpenhourSerializers,GetOpneHourByLocationIdViewSerializers,RemoveImagesFilesByLocationIdImageIdSerializers,UpdateImagesFilesByLocationIdImageIdSerializers,UpdateImagesFilesByLocationIdSerializers,RemoveLocationByIdSerializers,GetConnectionWithCocialMediaSerializers,GetAllConnectionOfOneLocationSerializers,LocationRemoveWithSocialMediaSerializers,LocationWithSocialMediaSerializers,EditLocationpaymentMethodSerializers,EditLocationHoursSerializers,EditLocationBusinessSerializers, GetOneLocationSerializersValidate, AddLocationSerializers,GetAllLocationSerializers,GetAllLocationSerializersValidate\r\nfrom dashifyproject.tokens import CsrfExemptSessionAuthentication\r\nfrom accounts.models import DfUser\r\nfrom .models import DfBusinessLocation,DfLocationImage,DfLocationPaymentMethod,DfLocationConnectPlatform,DfLocationOpenHours\r\nfrom datetime import date\r\nimport base64\r\nfrom django.core.files.base import ContentFile\r\nfrom manage_dropdown_value.models import DfBusinessCategory,DfCountry,DfState\r\n\r\n# Create your views here.\r\n\r\n\r\n\r\nclass GetOpenHourByLocationIdView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n data = {}\r\n message = \"\"\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id \r\n serializer = GetOpneHourByLocationIdViewSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n get_bus_loca_ins = 
serializer.validated_data\r\n if DfLocationOpenHours.objects.filter(Business_Location=get_bus_loca_ins).filter(Type=request.data['set_type']).exists():\r\n get_open_houre = DfLocationOpenHours.objects.filter(Business_Location=get_bus_loca_ins).filter(Type=request.data['set_type'])\r\n get_open_houre_ins = GetOpenhourSerializers(get_open_houre,many=True)\r\n data = get_open_houre_ins.data\r\n return Response({\"data\":data},status=200)\r\n\r\n# ================================EDIT Location payment_method ===============\r\nclass EditLocationPaymentMethodView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n message = \"\"\r\n if request.method == \"POST\":\r\n serializer = EditLocationpaymentMethodSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n if DfBusinessLocation.objects.filter(id=request.data['Location_id']).exists():\r\n get_user_instance = get_object_or_404(DfBusinessLocation, id=request.data['Location_id'])\r\n if DfLocationPaymentMethod.objects.filter(Business_Location=get_user_instance).exists():\r\n\r\n DfLocationPaymentMethod.objects.filter(Business_Location=get_user_instance).delete()\r\n # ===============================================================\r\n for i in range(0, len(request.data[\"payment_method\"])):\r\n if request.data[\"payment_method\"][str(i)]:\r\n add_payment = DfLocationPaymentMethod(Business_Location=get_user_instance,\r\n Payment_Method=request.data[\"payment_method\"][str(i)])\r\n add_payment.save()\r\n # ===============================================================\r\n message = \"Payment method update succesfully.\"\r\n return Response({\"message\": message}, status=200)\r\n# ================================EDIT Location payment_method ===============\r\n\r\n\r\n\r\n# ================================EDIT Location Operations Hours ===============\r\nclass 
EditLocationHoursView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n message = \"\"\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = EditLocationHoursSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n get_bus_loca_ins = serializer.validated_data\r\n try:\r\n if request.data[\"open_houre\"][\"0\"][\"Type\"] == \"\":\r\n message = \"column 'Type' is required in every row.\"\r\n raise exceptions.ValidationError(message)\r\n else:\r\n if DfLocationOpenHours.objects.filter(Business_Location=get_bus_loca_ins).filter(Type=request.data[\"open_houre\"][\"0\"][\"Type\"]).exists():\r\n DfLocationOpenHours.objects.filter(Business_Location=get_bus_loca_ins).filter(Type=request.data[\"open_houre\"][\"0\"][\"Type\"]).delete()\r\n except:\r\n message = \"column 'Type' is required in every row.\"\r\n raise exceptions.ValidationError(message)\r\n for i in range(0,len(request.data[\"open_houre\"])):\r\n if request.data[\"open_houre\"][str(i)][\"Day\"]:\r\n add_hover = DfLocationOpenHours(\r\n Business_Location = get_bus_loca_ins,\r\n date = request.data[\"open_houre\"][str(i)][\"date\"], \r\n Day = request.data[\"open_houre\"][str(i)][\"Day\"],\r\n Type = request.data[\"open_houre\"][str(i)][\"Type\"],\r\n Open_status = request.data[\"open_houre\"][str(i)][\"Open_status\"],\r\n start_time_1 = request.data[\"open_houre\"][str(i)][\"start_time_1\"],\r\n end_time_1 = request.data[\"open_houre\"][str(i)][\"end_time_1\"],\r\n start_time_2 = request.data[\"open_houre\"][str(i)][\"start_time_2\"],\r\n end_time_2 = request.data[\"open_houre\"][str(i)][\"end_time_2\"]\r\n )\r\n add_hover.save()\r\n message = \"Location open hour is update.\" \r\n return Response({\"message\": message}, status=200)\r\n# ================================EDIT Location Operations Hours 
===============\r\n\r\n\r\n# ================================EDIT Location Business ===============\r\nclass EditLocationBusinessView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n message = \"\"\r\n if request.method == \"POST\":\r\n serializer = EditLocationBusinessSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n message = serializer.validated_data\r\n return Response({\"message\": message}, status=200)\r\n# ================================EDIT Location Business ===============\r\n\r\n\r\n\r\n\r\n\r\nclass GetLocationByIdView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self,request):\r\n location = {}\r\n if request.method == \"POST\":\r\n serializer = GetOneLocationSerializersValidate(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n location_seri = GetAllLocationSerializers(serializer.validated_data)\r\n location = location_seri.data\r\n return Response({\"location\": location}, status=200)\r\n\r\nclass GetAllLocationView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n # GetAllLocationSerializers\r\n def post(self,request):\r\n all_businessLocation = {}\r\n if request.method == \"POST\":\r\n serializer = GetAllLocationSerializersValidate(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n all_location = DfBusinessLocation.objects.filter(DfUser=serializer.validated_data).order_by(\"-id\")\r\n all_locationSerializer = GetAllLocationSerializers(all_location, many=True)\r\n all_businessLocation = all_locationSerializer.data\r\n else:\r\n msg = \"Something wae wrong with API.\"\r\n raise 
exceptions.ValidationError(msg)\r\n return Response({\"all_location\":all_businessLocation},status=200)\r\n\r\n\r\nclass AddLocationView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n if request.method == \"POST\":\r\n test = \"\"\r\n serializer = AddLocationSerializers(data=request.data)\r\n data_response = {}\r\n if serializer.is_valid():\r\n if DfUser.objects.filter(id=request.data['user_id']).exists():\r\n get_user_instance = get_object_or_404(DfUser, id=request.data['user_id'])\r\n # try:\r\n set_Franchise_Location = False\r\n if request.data['Franchise_Location'] == \"true\":\r\n set_Franchise_Location = True\r\n set_Do_not_publish_my_address = False\r\n if request.data['Do_not_publish_my_address'] == \"true\":\r\n set_Do_not_publish_my_address = True \r\n add_location = DfBusinessLocation(\r\n DfUser=get_user_instance,\r\n Store_Code = request.data['Store_Code'],\r\n Location_name=request.data['Location_name'],\r\n Additional_catugory=request.data['Additional_catugory'],\r\n Address_1=request.data['Address_1'],\r\n Address_2=request.data['Address_2'],\r\n City=request.data['City'],\r\n Zipcode=request.data['Zipcode'],\r\n Phone_no=request.data['Phone_no'],\r\n Website=request.data['Website'],\r\n Franchise_Location = set_Franchise_Location,\r\n Do_not_publish_my_address = set_Do_not_publish_my_address,\r\n Business_Owner_Name=request.data['Business_Owner_Name'],\r\n Owner_Email=request.data['Owner_Email'],\r\n Business_Tagline=request.data['Business_Tagline'],\r\n Year_Of_Incorporation=request.data['Year_Of_Incorporation'],\r\n About_Business=request.data['About_Business'],\r\n Facebook_Profile=request.data['Facebook_Profile'],\r\n Instagram_Profile=request.data['Instagram_Profile'],\r\n Twitter_Profile=request.data['Twitter_Profile']\r\n )\r\n\r\n set_category_ins = None\r\n if 
DfBusinessCategory.objects.filter(id=request.data['Business_category']).exists():\r\n set_category_ins = get_object_or_404(DfBusinessCategory, id=request.data['Business_category'])\r\n add_location.Business_category = set_category_ins\r\n\r\n set_country_ins = None\r\n if DfCountry.objects.filter(id=request.data['Country']).exists():\r\n set_country_ins = get_object_or_404(DfCountry, id=request.data['Country'])\r\n add_location.Country = set_country_ins\r\n\r\n set_state_ins = None\r\n if DfState.objects.filter(id=request.data['State']).exists():\r\n set_state_ins = get_object_or_404(DfState, id=request.data['State'])\r\n add_location.State = set_state_ins\r\n\r\n image_data_logo = None\r\n if request.data['Business_Logo']:\r\n image_file_get = request.data['Business_Logo']\r\n format, imgstr = image_file_get.split(';base64,')\r\n ext = format.split('/')[-1]\r\n today_date = date.today()\r\n set_file_name = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(today_date.year)\r\n file_name = set_file_name + \".\" + ext\r\n data = ContentFile(base64.b64decode(imgstr), name=file_name)\r\n image_data_logo = data\r\n print(image_data_logo)\r\n add_location.Business_Logo = image_data_logo\r\n\r\n image_data_banner = None\r\n if request.data['Business_Cover_Image']:\r\n image_file_get_cover = request.data['Business_Cover_Image']\r\n format_cover, imgstr_cover = image_file_get_cover.split(';base64,')\r\n ext_cover = format_cover.split('/')[-1]\r\n today_date = date.today()\r\n set_file_name_cover = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(today_date.year)\r\n file_name_cover = set_file_name_cover + \".\" + ext_cover\r\n data_cover = ContentFile(base64.b64decode(imgstr_cover), name=file_name_cover)\r\n image_data_banner = data_cover\r\n add_location.Business_Cover_Image = image_data_banner\r\n add_location.save()\r\n\r\n for i in range(0,len(request.data[\"other_image\"])):\r\n if request.data[\"other_image\"][str(i)]:\r\n 
image_file_get_other = request.data[\"other_image\"][str(i)]\r\n format_other, imgstr_other = image_file_get_other.split(';base64,')\r\n ext_other = format_other.split('/')[-1]\r\n today_date = date.today()\r\n set_file_name_other = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(\r\n today_date.year)\r\n file_name_other = set_file_name_other + \".\" + ext_other\r\n data_other = ContentFile(base64.b64decode(imgstr_other), name=file_name_other)\r\n image_data_other = data_other\r\n add_other_image = DfLocationImage(\r\n Business_Location = add_location,\r\n Image = image_data_other\r\n )\r\n add_other_image.save()\r\n # ===============================================================\r\n for i in range(0,len(request.data[\"payment_method\"])):\r\n if request.data[\"payment_method\"][str(i)]:\r\n add_payment = DfLocationPaymentMethod(\r\n Business_Location = add_location,\r\n Payment_Method = request.data[\"payment_method\"][str(i)]\r\n )\r\n add_payment.save()\r\n # ===============================================================\r\n # ===============================================================\r\n for i in range(0,len(request.data[\"open_houre\"])):\r\n if request.data[\"open_houre\"][str(i)][\"Day\"]:\r\n add_hover = DfLocationOpenHours(\r\n Business_Location = add_location,\r\n Day = request.data[\"open_houre\"][str(i)][\"Day\"],\r\n Type = request.data[\"open_houre\"][str(i)][\"Type\"],\r\n Open_status = request.data[\"open_houre\"][str(i)][\"Open_status\"],\r\n start_time_1 = request.data[\"open_houre\"][str(i)][\"start_time_1\"],\r\n end_time_1 = request.data[\"open_houre\"][str(i)][\"end_time_1\"],\r\n start_time_2 = request.data[\"open_houre\"][str(i)][\"start_time_2\"],\r\n end_time_2 = request.data[\"open_houre\"][str(i)][\"end_time_2\"]\r\n )\r\n add_hover.save()\r\n # ===============================================================\r\n \r\n data_response[\"message\"] = \"Location Add successfully\"\r\n data_response[\"Location_id\"] = 
add_location.id\r\n data_response[\"Store_Code\"] = add_location.Store_Code\r\n # except:\r\n # msg = \"Something wae wrong with API.\"\r\n # raise exceptions.ValidationError(msg)\r\n\r\n else:\r\n msg = \"User_id is invalue.\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n data_response = serializer.errors\r\n return Response(data_response)\r\n\r\nclass LocationConnectWithSocialSedia(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self,request):\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = LocationWithSocialMediaSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n get_data = serializer.validated_data\r\n return Response({\"message\": get_data}, status=200)\r\n\r\nclass LocationConnectRemoveWithSocialSedia(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self,request):\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = LocationRemoveWithSocialMediaSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n get_data = serializer.validated_data\r\n return Response({\"message\": get_data}, status=200)\r\n\r\n\r\nclass GetAllConnectionOfOneLocation(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n all_connection_set = {}\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = GetAllConnectionOfOneLocationSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n get_all_connection=serializer.validated_data\r\n get_all_connection_sri = 
GetConnectionWithCocialMediaSerializers(get_all_connection, many=True)\r\n all_connection_set = get_all_connection_sri.data\r\n return Response({\"data\": all_connection_set}, status=200)\r\n\r\nclass GetAllConnectionOfBusinessLocationnToPlatfrom(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def get(self,request):\r\n if DfUser.objects.filter(user=self.request.user).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser,user=self.request.user)\r\n if DfLocationConnectPlatform.objects.filter(DfUser=get_Dfuser_ins).exists():\r\n get_data = DfLocationConnectPlatform.objects.filter(DfUser=get_Dfuser_ins)\r\n get_all_connection_sri = GetConnectionWithCocialMediaSerializers(get_data, many=True)\r\n all_connection_set = get_all_connection_sri.data\r\n else:\r\n all_connection_set = {} \r\n else:\r\n msg = \"Login User is not exists\"\r\n raise exceptions.ValidationError(msg) \r\n return Response({\"data\":all_connection_set},status=200)\r\n\r\n\r\nclass RemoveLocationByIdView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated] \r\n\r\n def post(self, request):\r\n all_connection_set = {}\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = RemoveLocationByIdSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n return Response({\"messgae\": serializer.validated_data}, status=200) \r\n\r\nclass UpdateImagesFilesByLocationIdView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated] \r\n\r\n def post(self, request):\r\n messsage = \"Image updated successfuly.\"\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id \r\n serializer = 
UpdateImagesFilesByLocationIdSerializers(data=request.data) \r\n data_response = {}\r\n serializer.is_valid(raise_exception=True)\r\n ls_busloc_ins=serializer.validated_data\r\n Business_Logo_set = None\r\n add_location = get_object_or_404(DfBusinessLocation,id=request.data['location_id'])\r\n if \"Business_Logo\" in request.data:\r\n if request.data['Business_Logo']:\r\n image_file_get = request.data['Business_Logo']\r\n format, imgstr = image_file_get.split(';base64,')\r\n ext = format.split('/')[-1]\r\n today_date = date.today()\r\n set_file_name = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(today_date.year)\r\n file_name = set_file_name + \".\" + ext\r\n data = ContentFile(base64.b64decode(imgstr), name=file_name)\r\n image_data_logo = data\r\n add_location.Business_Logo.delete(save=False) \r\n add_location.Business_Logo = image_data_logo\r\n add_location.save() \r\n messsage = \"Business Logo \"\r\n if \"Business_Cover_Image\" in request.data:\r\n if request.data['Business_Cover_Image']:\r\n image_file_get_cover = request.data['Business_Cover_Image']\r\n format_cover, imgstr_cover = image_file_get_cover.split(';base64,')\r\n ext_cover = format_cover.split('/')[-1]\r\n today_date = date.today()\r\n set_file_name_cover = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(today_date.year)\r\n file_name_cover = set_file_name_cover + \".\" + ext_cover\r\n data_cover = ContentFile(base64.b64decode(imgstr_cover), name=file_name_cover)\r\n image_data_banner = data_cover\r\n add_location.Business_Cover_Image.delete(save=False) \r\n add_location.Business_Cover_Image = image_data_banner\r\n add_location.save()\r\n messsage += \"Business cover image \" \r\n messsage += \"update successfully.\" \r\n return Response({\"message\":messsage},status=200)\r\n\r\nclass AddOtherImagesFilesByLocationIdView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = 
[IsAuthenticated]\r\n\r\n def post(self, request):\r\n messsage = \"Other image add successfully.\"\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id \r\n serializer = UpdateImagesFilesByLocationIdSerializers(data=request.data) \r\n data_response = {}\r\n serializer.is_valid(raise_exception=True)\r\n ls_busloc_ins=serializer.validated_data\r\n Business_Logo_set = None\r\n add_location = get_object_or_404(DfBusinessLocation,id=request.data['location_id'])\r\n if \"other_image\" in request.data:\r\n for i in range(0,len(request.data[\"other_image\"])):\r\n if request.data[\"other_image\"][str(i)]:\r\n image_file_get_other = request.data[\"other_image\"][str(i)]\r\n format_other, imgstr_other = image_file_get_other.split(';base64,')\r\n ext_other = format_other.split('/')[-1]\r\n today_date = date.today()\r\n set_file_name_other = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(today_date.year)\r\n file_name_other = set_file_name_other + \".\" + ext_other\r\n data_other = ContentFile(base64.b64decode(imgstr_other), name=file_name_other)\r\n image_data_other = data_other\r\n add_other_image = DfLocationImage(\r\n Business_Location = add_location,\r\n Image = image_data_other\r\n )\r\n add_other_image.save() \r\n messsage = \"Other image add successfully.\"\r\n return Response({\"message\":messsage},status=200)\r\n\r\n\r\nclass UpdateOtherImagesFilesByLocationIdImageIdView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n messsage = \"Other image updated successfully.\"\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id \r\n serializer = UpdateImagesFilesByLocationIdImageIdSerializers(data=request.data) \r\n data_response = {}\r\n serializer.is_valid(raise_exception=True)\r\n ls_busloc_ins=serializer.validated_data\r\n Business_Logo_set 
= None\r\n add_location = get_object_or_404(DfBusinessLocation,id=request.data['location_id'])\r\n if DfLocationImage.objects.filter(id=request.data['image_id']).exists():\r\n get_image_ins = get_object_or_404(DfLocationImage,Business_Location=add_location,id=request.data['image_id'])\r\n if request.data[\"image\"]:\r\n image_file_get_other = request.data[\"image\"]\r\n format_other, imgstr_other = image_file_get_other.split(';base64,')\r\n ext_other = format_other.split('/')[-1]\r\n today_date = date.today()\r\n set_file_name_other = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(today_date.year)\r\n file_name_other = set_file_name_other + \".\" + ext_other\r\n data_other = ContentFile(base64.b64decode(imgstr_other), name=file_name_other)\r\n image_data_other = data_other\r\n get_image_ins.Image.delete(save=False)\r\n get_image_ins.Image = image_data_other\r\n get_image_ins.save()\r\n messsage = \"Other image updated successfully.\"\r\n else:\r\n msg = \"image_id is incorrect.\"\r\n raise exceptions.ValidationError(msg) \r\n return Response({\"message\":messsage},status=200)\r\n\r\n\r\nclass RemoveOtherImagesFilesByLocationIdImageIdView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self, request):\r\n messsage = \"Other image removes successfully.\"\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id \r\n serializer = RemoveImagesFilesByLocationIdImageIdSerializers(data=request.data) \r\n data_response = {}\r\n serializer.is_valid(raise_exception=True)\r\n ls_busloc_ins=serializer.validated_data\r\n Business_Logo_set = None\r\n add_location = get_object_or_404(DfBusinessLocation,id=request.data['location_id'])\r\n if DfLocationImage.objects.filter(id=request.data['image_id']).filter(Business_Location=add_location).exists():\r\n 
DfLocationImage.objects.filter(id=request.data['image_id']).filter(Business_Location=add_location).delete()\r\n messsage = \"Other image removes successfully.\"\r\n else:\r\n msg = \"image_id is incorrect.\"\r\n raise exceptions.ValidationError(msg) \r\n return Response({\"message\":messsage},status=200)\r\n\r\nclass RemoveAllOtherImagesFilesByLocationIdView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated] \r\n\r\n def post(self, request):\r\n messsage = \"Other image removes successfully.\"\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id \r\n serializer = UpdateImagesFilesByLocationIdSerializers(data=request.data) \r\n data_response = {}\r\n serializer.is_valid(raise_exception=True)\r\n ls_busloc_ins=serializer.validated_data\r\n Business_Logo_set = None\r\n add_location = get_object_or_404(DfBusinessLocation,id=request.data['location_id'])\r\n if DfLocationImage.objects.filter(Business_Location=add_location).exists():\r\n DfLocationImage.objects.filter(Business_Location=add_location).delete()\r\n messsage = \"Other image removes successfully.\"\r\n return Response({\"message\":messsage},status=200) "
},
{
"alpha_fraction": 0.5566778779029846,
"alphanum_fraction": 0.5862327218055725,
"avg_line_length": 47.599998474121094,
"blob_id": "a186449b92dd0b0f689fffc89b5335cdbe987a65",
"content_id": "07f3a8a304da49d4ff118443176cfc7993b69c01",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2673,
"license_type": "no_license",
"max_line_length": 133,
"num_lines": 55,
"path": "/manage_orders_and_payments/migrations/0006_auto_20200923_0646.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-23 06:46\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('accounts', '0008_auto_20200821_1621'),\n ('manage_pricing', '0011_auto_20200922_1434'),\n ('manage_orders_and_payments', '0005_auto_20200922_1434'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dforders',\n name='DfUser',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='accounts.DfUser'),\n ),\n migrations.AlterField(\n model_name='dforders',\n name='Duration_Type',\n field=models.CharField(blank=True, choices=[('D', 'Days'), ('M', 'Month'), ('Y', 'Year')], max_length=120, null=True),\n ),\n migrations.AlterField(\n model_name='dforders',\n name='Package',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='manage_pricing.DfPrice'),\n ),\n migrations.CreateModel(\n name='DfOrdersAndPayment',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Order_id', models.CharField(blank=True, max_length=120, null=True, unique=True)),\n ('Final_Amount', models.FloatField(default=0)),\n ('Duration_Type', models.CharField(choices=[('D', 'Days'), ('M', 'Month'), ('Y', 'Year')], max_length=120)),\n ('Duration_Time', models.IntegerField()),\n ('Create_Date', models.DateTimeField(default=django.utils.timezone.now)),\n ('Payment', models.BooleanField(default=False)),\n ('Payment_Type', models.CharField(blank=True, max_length=120, null=True)),\n ('Transaction_id', models.CharField(blank=True, max_length=120, null=True)),\n ('Payment_Date', models.DateTimeField(blank=True, null=True)),\n ('Active', models.BooleanField(default=False)),\n ('Start_Date', models.DateField(blank=True, null=True)),\n ('End_Date', models.DateField(blank=True, null=True)),\n ('DfUser', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.DfUser')),\n ('Package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='manage_pricing.DfPrice')),\n ],\n options={\n 'verbose_name_plural': 'DF Orders And Payment',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.5708413124084473,
"alphanum_fraction": 0.5771444439888,
"avg_line_length": 47.98630142211914,
"blob_id": "836fcea5fa0511bb0c77a2b56c7d28a44ac3be2b",
"content_id": "4ebdda1863a3621bd8d500c27efe81c0367dcf70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3649,
"license_type": "no_license",
"max_line_length": 159,
"num_lines": 73,
"path": "/manage_dropdown_value/migrations/0001_initial.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-10 10:39\r\n\r\nfrom django.conf import settings\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport django.utils.timezone\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n initial = True\r\n\r\n dependencies = [\r\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='DfCountry',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Country_Name', models.CharField(max_length=20)),\r\n ('Status', models.BooleanField(default=True)),\r\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Create_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF Country',\r\n },\r\n ),\r\n migrations.CreateModel(\r\n name='DfState',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('State_name', models.CharField(max_length=20)),\r\n ('Status', models.BooleanField(default=True)),\r\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Country_Name', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_dropdown_value.DfCountry')),\r\n ('Create_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF State',\r\n },\r\n ),\r\n migrations.CreateModel(\r\n name='DfCity',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('City_name', models.CharField(max_length=20)),\r\n ('Status', models.BooleanField(default=True)),\r\n ('Create_date', 
models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Country_Name', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_dropdown_value.DfCountry')),\r\n ('Create_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),\r\n ('State_Name', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_dropdown_value.DfState')),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF City',\r\n },\r\n ),\r\n migrations.CreateModel(\r\n name='DfBusinessCategory',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Category_Name', models.CharField(max_length=50)),\r\n ('Status', models.BooleanField(default=True)),\r\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Create_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF Business Category',\r\n },\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.719962477684021,
"alphanum_fraction": 0.7237171530723572,
"avg_line_length": 48.921875,
"blob_id": "9e838a9af06fa7706e8ca0a99cd7e7cd36317244",
"content_id": "281434f9a937624fc0da970de004a872da94cd8f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3196,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 64,
"path": "/manage_orders_and_payments/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from .models import DfOrders,DfOrdersAndPayment\nfrom .serializers import DfOrderSerializer,DfOrdersAndPaymentSerializer\nfrom rest_framework import generics,viewsets\n# from .api_pagination import ProductLimitOffsetPagination , PrtoductPageNumberPagination\nfrom rest_framework.authentication import TokenAuthentication,SessionAuthentication,BasicAuthentication\nfrom rest_framework.permissions import IsAuthenticated\nfrom dashifyproject.tokens import CsrfExemptSessionAuthentication\nfrom accounts.models import DfUser\nfrom django.db.models import Q\nfrom django.shortcuts import get_object_or_404\n# Create your views here.\n\n\n\n\nclass GetOrderList(generics.ListCreateAPIView):\n serializer_class = DfOrdersAndPaymentSerializer\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def get_queryset(self, *args, **kwargs):\n \"\"\"\n This view should return a list of all the purchases for\n the user as determined by the username portion of the URL.\n \"\"\"\n ds_user_ins = None\n if DfUser.objects.filter(user=self.request.user).exists():\n ds_user_ins = get_object_or_404(DfUser, user=self.request.user)\n result_ = None\n if DfOrdersAndPayment.objects.filter(DfUser=ds_user_ins).exists():\n if 'active' in self.request.GET:\n result_ = DfOrdersAndPayment.objects.filter(DfUser=ds_user_ins).filter(Active=self.request.GET['active']).order_by(\"-id\")\n else:\n result_ = DfOrdersAndPayment.objects.filter(DfUser=ds_user_ins).order_by(\"-id\")\n return result_\n\n\nclass UpdateOrder(generics.RetrieveUpdateDestroyAPIView):\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n queryset = DfOrdersAndPayment.objects.all().order_by(\"-id\")\n loockup_field = 'id'\n serializer_class = DfOrdersAndPaymentSerializer\n\n\n def update(self, request, *args, **kwargs):\n response = 
super(UpdateOrder, self).update(request,*args, **kwargs)\n ds_user_ins = None\n if DfUser.objects.filter(user=request.user).exists():\n ds_user_ins = get_object_or_404(DfUser,user=request.user)\n DfOrdersAndPayment.objects.filter(~Q(id=kwargs['pk'])).filter(DfUser=ds_user_ins).update(Active=False)\n if response.status_code ==200:\n mydata = response.data\n from django.core.cache import cache\n cache.set(\"ID:{}\".format(mydata.get('id',None)),{'Payment':mydata['Payment'],'Payment_Type':mydata['Payment_Type'],\n 'Transaction_id':mydata['Transaction_id'],'Payment_Date':mydata['Payment_Date'],'Active':mydata['Active'],\n 'Start_Date':mydata['Start_Date'],'End_Date':mydata['End_Date']})\n return response\n\nclass CreateNewOrder(generics.ListCreateAPIView):\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n queryset = DfOrdersAndPayment.objects.all().order_by(\"-id\")\n serializer_class = DfOrdersAndPaymentSerializer\n\n"
},
{
"alpha_fraction": 0.7259953022003174,
"alphanum_fraction": 0.7259953022003174,
"avg_line_length": 36.818180084228516,
"blob_id": "0d5574c68f4a67f6fb49d45266265bf73d9951b0",
"content_id": "75d4369203e4b7e98bba3889eeddcc1d292a296a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 427,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 11,
"path": "/accounts/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\r\nfrom .models import DfUser\r\nfrom import_export.admin import ImportExportModelAdmin\r\n# Register your models here.\r\n\r\nclass DfUserAdmin(ImportExportModelAdmin):\r\n search_fields = ['first_name']\r\n list_display = ('user','first_name','Company_name','Country','Phone','Zip','Last_login','Create_date')\r\n list_filter = ('Country','Create_date',)\r\n\r\nadmin.site.register(DfUser,DfUserAdmin)\r\n"
},
{
"alpha_fraction": 0.6578657627105713,
"alphanum_fraction": 0.6688668727874756,
"avg_line_length": 29.33333396911621,
"blob_id": "5f5b8b095188fba64e2ff74ae6d81b968c098e35",
"content_id": "eb60c311ec4cb9494b93e128f265909d843a7941",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 909,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 30,
"path": "/queryes/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\nimport django\n# Create your models here.\n# class DfQuery(models.Model):\n# Name = models.CharField(max_length=20)\n# Your_Email = models.CharField(max_length=20)\n# Message = models.TextField()\n# Other_Data = models.TextField(default=\"\",null=True,blank=True)\n# Create_date = models.DateTimeField(default=django.utils.timezone.now)\n#\n#\n# def __str__(self):\n# return self.Name\n#\n# class Meta:\n# verbose_name_plural = \"DF Query\"\n\n\nclass DfQueryInfo(models.Model):\n Name = models.CharField(max_length=120)\n Your_Email = models.CharField(max_length=120)\n Message = models.TextField()\n Other_Data = models.TextField(default=\"\", null=True, blank=True)\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\n\n def __str__(self):\n return self.Name\n\n class Meta:\n verbose_name_plural = \"DF Query\""
},
{
"alpha_fraction": 0.7008456587791443,
"alphanum_fraction": 0.7008456587791443,
"avg_line_length": 50.66666793823242,
"blob_id": "da687857f6227f77072df792e0aaf7587e0df500",
"content_id": "b5d8683d20ada54ddb68e5eee8e3b38fde00211f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 946,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 18,
"path": "/accounts/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\n\r\nurlpatterns = [\r\n path('', views.UserList.as_view()),\r\n path('register', views.UserRegistration.as_view()),\r\n path('get-all-user', views.GgetAllUser.as_view()),\r\n path('login', csrf_exempt(views.LoginView.as_view()),name=\"login\"),\r\n path('logout', views.LogoutView.as_view(),name=\"Logout\"),\r\n path('get-login-user-info', views.LoginUserInfoView.as_view(),name=\"login_user_info\"),\r\n path('account-activate', views.ActivateYoutAccount.as_view(),name=\"Testdata\"),\r\n path('send-varification-link', views.SendVarificationLink.as_view(),name=\"Testdata\"),\r\n path('get-link-of-forget-password', views.GetLinkOfForgetPassword.as_view(),name=\"Forget_password_link\"),\r\n path('reset-password', views.ResetPasswordView.as_view(),name=\"Reset_password\"),\r\n path('Testdata', views.Testdata.as_view(),name=\"Testdata\"),\r\n]"
},
{
"alpha_fraction": 0.5856031179428101,
"alphanum_fraction": 0.6459143757820129,
"avg_line_length": 26.052631378173828,
"blob_id": "3f4ba6bb96c14d353a874d2607f7945f8e8b274d",
"content_id": "fe86080a823103eef2d8a9a22be757a7195d23e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 514,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 19,
"path": "/manage_pricing/migrations/0011_auto_20200922_1434.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 14:34\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0010_auto_20200922_1236'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfprice',\n name='Package_Type',\n field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_pricing.DfPackageName'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5316159129142761,
"alphanum_fraction": 0.6088992953300476,
"avg_line_length": 22.72222137451172,
"blob_id": "54422fc274ac978723d7c9584676c9c489fead2a",
"content_id": "9d4995ea01fe237c9fdc2206e59c733fd81402ed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 427,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 18,
"path": "/manage_locations/migrations/0016_dflocationopenhours_date.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-20 05:35\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0015_auto_20200416_1224'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dflocationopenhours',\n name='date',\n field=models.CharField(blank=True, max_length=20, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.4970414340496063,
"alphanum_fraction": 0.5887573957443237,
"avg_line_length": 18.882352828979492,
"blob_id": "944146c3e0a365c13de3bd979990f220c3229f16",
"content_id": "33634c4ebd32295eb61246c91e524e4b509fa6ed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 338,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 17,
"path": "/manage_pricing/migrations/0007_remove_dfprice_priyorty.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 12:08\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0006_auto_20200922_1207'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='dfprice',\n name='priyorty',\n ),\n ]\n"
},
{
"alpha_fraction": 0.800000011920929,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 40.66666793823242,
"blob_id": "ed859bbaa966f8d71079714c3444271b3b9d4185",
"content_id": "80dedf1044467cc4edfbc9fe95f564f09b566440",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 500,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 12,
"path": "/manage_pricing/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom import_export.admin import ImportExportModelAdmin\nfrom .models import DfPrice,DfPackageName\n# Register your models here.\n\nclass DfPackageNameAdmin(ImportExportModelAdmin):\n list_display = ('name','keyword')\nadmin.site.register(DfPackageName,DfPackageNameAdmin)\n\nclass DfPriceAdmin(ImportExportModelAdmin):\n list_display = ('Package_Type','Price','Duration_Type','Duration_time','Start','Create_Date','Orders_set')\nadmin.site.register(DfPrice,DfPriceAdmin)\n"
},
{
"alpha_fraction": 0.5518906116485596,
"alphanum_fraction": 0.594529390335083,
"avg_line_length": 40.43333435058594,
"blob_id": "4a5caa6e5c0b9d0e0cfe72aa9fa3851121a25721",
"content_id": "d86d99362bca3acde0d50c397031ac615b140f46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1243,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 30,
"path": "/manage_campus/migrations/0007_dfuseremail.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-11 13:42\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('accounts', '0007_auto_20200806_1201'),\n ('manage_campus', '0006_auto_20200811_1123'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfUseremail',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Email', models.CharField(blank=True, max_length=500, null=True)),\n ('Name', models.CharField(blank=True, max_length=500, null=True)),\n ('mail_sent_status', models.BooleanField(default=False)),\n ('Sent_date', models.DateTimeField(blank=True, null=True)),\n ('Campign', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_campus.DfCampaign')),\n ('DfUser', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\n ],\n options={\n 'verbose_name_plural': 'DF User Email',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.5913556218147278,
"alphanum_fraction": 0.652259349822998,
"avg_line_length": 25.789474487304688,
"blob_id": "321b5e2234ad553bf575c80cd360c6f9e8c9f751",
"content_id": "8b0c321b36137420d224442f84bbaddedac208c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 509,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 19,
"path": "/manage_locations/migrations/0018_auto_20200420_1111.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-20 11:11\n\nfrom django.db import migrations, models\nimport manage_locations.models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0017_auto_20200420_1102'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfbusinesslocation',\n name='Business_Logo',\n field=models.ImageField(blank=True, null=True, upload_to=manage_locations.models.user_directory_path),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5315614342689514,
"alphanum_fraction": 0.5526024103164673,
"avg_line_length": 31.25,
"blob_id": "680d4e35b070d5619550071717402a95deae521b",
"content_id": "2607a6fd7c938b6d7e0429622a9b925ed9ebd698",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 903,
"license_type": "no_license",
"max_line_length": 149,
"num_lines": 28,
"path": "/manage_pricing/migrations/0001_initial.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 10:35\n\nfrom django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfPrice',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Package_Type', models.CharField(choices=[('S', 'Start'), ('B', 'Business'), ('P', 'Professional'), ('M', 'Max')], max_length=120)),\n ('Price', models.FloatField(default=0)),\n ('Start', models.BooleanField(default=True)),\n ('Create_Date', models.DateTimeField(default=django.utils.timezone.now)),\n ],\n options={\n 'verbose_name_plural': 'DF Price',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.6147463321685791,
"alphanum_fraction": 0.6181073188781738,
"avg_line_length": 41.0723991394043,
"blob_id": "12cda68922d08b3dddd5818c825d74c8c3e34064",
"content_id": "07e3783fa1b70483d88ff488e4558500260600eb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9521,
"license_type": "no_license",
"max_line_length": 167,
"num_lines": 221,
"path": "/accounts/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework.views import APIView\r\nfrom django.shortcuts import redirect,get_object_or_404\r\nfrom rest_framework.response import Response\r\nfrom datetime import datetime\r\nfrom .serializear import PaswordResetSerializers,EmailSerializers,AccountActivateSerializers,UserSerializers,RegistrationSerializers,LoginSerializers,DfUserSerializers\r\nfrom rest_framework.authtoken.models import Token\r\nfrom django.contrib.auth import login as django_login ,logout as django_logout\r\nfrom rest_framework.authentication import TokenAuthentication,SessionAuthentication,BasicAuthentication\r\nfrom rest_framework.permissions import IsAuthenticated\r\nfrom .models import DfUser\r\nfrom django.contrib.auth.models import User\r\nfrom django.conf import settings\r\nfrom django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode\r\nfrom rest_framework import exceptions\r\nfrom django.template.loader import render_to_string\r\nimport email.message\r\nimport smtplib\r\nfrom dashifyproject.tokens import account_activation_token,CsrfExemptSessionAuthentication\r\nfrom django.utils.encoding import force_bytes, force_text\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n# Create your views here.\r\n\r\n\r\n\r\nclass Testdata(APIView):\r\n\r\n def get(self,request):\r\n return Response(\"skdjncdj\")\r\n\r\n\r\n\r\ndef open_admin(request):\r\n return redirect(settings.BASE_URL+\"admin\")\r\n\r\n\r\n\r\ndef send_forget_pasword_link(user_id,yourname,user_email_set):\r\n# ====================================== SEND MAIL ===============\r\n user_id = user_id\r\n uid = urlsafe_base64_encode(force_bytes(user_id))\r\n get_user_instant = User.objects.get(email=user_email_set)\r\n token_get = account_activation_token.make_token(get_user_instant)\r\n forget_password_linki = settings.BASE_URL_OTHER_SITE + \"password-reset/\" + uid + \"/\" + token_get\r\n logo_image = settings.BASE_URL + 'static/logo.png'\r\n yourname = yourname\r\n user_email = user_email_set\r\n 
data_content = {\"BASE_URL_other_site\": settings.BASE_URL_OTHER_SITE, \"BASE_URL\": settings.BASE_URL,\r\n \"yourname\": yourname, \"user_email\": user_email,\r\n \"logo_image\": logo_image, \"forget_password_linki\": forget_password_linki}\r\n email_content = render_to_string('email_template/email_send_for_forget_password.html', data_content)\r\n msg = email.message.Message()\r\n msg['Subject'] = 'Password Reset Link'\r\n msg['From'] = settings.EMAIL_HOST_USER\r\n msg['To'] = user_email\r\n password = settings.EMAIL_HOST_PASSWORD\r\n msg.add_header('Content-Type', 'text/html')\r\n msg.set_payload(email_content)\r\n s = smtplib.SMTP(settings.EMAIL_HOST + ':' + str(settings.EMAIL_PORT))\r\n s.starttls()\r\n s.login(msg['From'], password)\r\n s.sendmail(msg['From'], [msg['To']], msg.as_string())\r\n return \"True\" \r\n # ====================================== SEND MAIL ===============\r\n\r\n\r\n\r\n\r\ndef send_varification_link(user_id,yourname,user_email_set):\r\n # ====================================== SEND MAIL ===============\r\n user_id = user_id\r\n uid = urlsafe_base64_encode(force_bytes(user_id))\r\n get_user_instant = User.objects.get(email=user_email_set)\r\n token_get = account_activation_token.make_token(get_user_instant)\r\n varification_link = settings.BASE_URL_OTHER_SITE + \"Login/\" + uid + \"/\" + token_get\r\n logo_image = settings.BASE_URL + 'static/logo.png'\r\n yourname = yourname\r\n user_email = user_email_set\r\n data_content = {\"BASE_URL_other_site\": settings.BASE_URL_OTHER_SITE, \"BASE_URL\": settings.BASE_URL,\r\n \"yourname\": yourname, \"user_email\": user_email,\r\n \"logo_image\": logo_image, \"varification_link\": varification_link}\r\n email_content = render_to_string('email_template/email_send_for_create_new_account.html', data_content)\r\n msg = email.message.Message()\r\n msg['Subject'] = 'Account Create successfully'\r\n msg['From'] = settings.EMAIL_HOST_USER\r\n msg['To'] = user_email\r\n password = 
settings.EMAIL_HOST_PASSWORD\r\n msg.add_header('Content-Type', 'text/html')\r\n msg.set_payload(email_content)\r\n s = smtplib.SMTP(settings.EMAIL_HOST + ':' + str(settings.EMAIL_PORT))\r\n s.starttls()\r\n s.login(msg['From'], password)\r\n s.sendmail(msg['From'], [msg['To']], msg.as_string())\r\n return \"True\" \r\n # ====================================== SEND MAIL ===============\r\n\r\n\r\n\r\n\r\n\r\n# ======================================Forgate password send link start=====================================================\r\n\r\nclass GetLinkOfForgetPassword(APIView):\r\n\r\n def post(self,request):\r\n if request.method == \"POST\":\r\n UserSerializer = EmailSerializers(data=request.data)\r\n UserSerializer.is_valid(raise_exception=True)\r\n user = UserSerializer.validated_data['user']\r\n send_forget_pasword_link(user.id,user.first_name+\" \"+user.last_name,user.email)\r\n message = \"Pasword reset link is sent to your register email.\"\r\n return Response( {\"message\":message}) \r\n# ======================================Forgate password send link end=====================================================\r\n\r\nclass SendVarificationLink(APIView):\r\n \r\n def post(self,request):\r\n if request.method == \"POST\":\r\n UserSerializer = EmailSerializers(data=request.data)\r\n UserSerializer.is_valid(raise_exception=True)\r\n user = UserSerializer.validated_data['user']\r\n send_varification_link(user.id,user.first_name+\" \"+user.last_name,user.email)\r\n message = \"Account varification link is send to your mail.\" \r\n return Response( {\"message\":message}) \r\n\r\n\r\nclass UserList(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n def get(self,request):\r\n users = DfUser.objects.all()\r\n UserSerializer = DfUserSerializers(users,many=True)\r\n return Response(UserSerializer.data)\r\n\r\n\r\nclass ResetPasswordView(APIView):\r\n\r\n def 
post(self,request):\r\n if request.method == \"POST\":\r\n serializer = PaswordResetSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n return Response({\"messgae\": serializer.validated_data}, status=200) \r\n\r\nclass ActivateYoutAccount(APIView):\r\n\r\n\r\n def post(self,request):\r\n if request.method == \"POST\":\r\n serializer = AccountActivateSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n return Response({\"messgae\": serializer.validated_data}, status=200) \r\n\r\nclass UserRegistration(APIView):\r\n def post(self,request):\r\n if request.method == \"POST\":\r\n serializer = RegistrationSerializers(data=request.data)\r\n data = {}\r\n if serializer.is_valid():\r\n user = serializer.save()\r\n data['response'] = \"Account create successfuly\"\r\n data['email'] = user.email\r\n data['username'] = user.username\r\n token = Token.objects.get(user=user).key\r\n data['Token'] = token\r\n send_varification_link(user.id,user.first_name+\" \"+user.last_name,user.email)\r\n else:\r\n data = serializer.errors\r\n return Response(data)\r\n\r\n\r\nclass LoginView(APIView):\r\n\t\r\n\t\r\n @csrf_exempt\r\n def post(self,request):\r\n serializer = LoginSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n user = serializer.validated_data['user']\r\n django_login(request,user)\r\n token ,create = Token.objects.get_or_create(user=user)\r\n DfUser.objects.filter(user=user).update(Last_login=datetime.now())\r\n get_user_info = DfUser.objects.filter(user=user)\r\n get_user_info_seri = DfUserSerializers(get_user_info,many=True)\r\n return Response({\"message\":\"Login successfully\", \"Token\":token.key,\"user_info\":get_user_info_seri.data},status=200)\r\n\r\n\r\nclass LogoutView(APIView):\r\n authentication_classes = (TokenAuthentication,)\r\n\r\n def post(self,request):\r\n django_logout(request)\r\n return Response(status=204)\r\n\r\n\r\n\r\nclass LoginUserInfoView(APIView):\r\n 
authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated] \r\n\r\n def get(self,request):\r\n user_data = {}\r\n if DfUser.objects.filter(user=self.request.user).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser,user=self.request.user)\r\n get_user_info_seri = DfUserSerializers(get_Dfuser_ins)\r\n user_data = get_user_info_seri.data\r\n else:\r\n message = \"User not found.\"\r\n raise exceptions.ValidationError(message) \r\n return Response({\"user_info\":user_data},status=200) \r\n\r\n\r\n\r\nclass GgetAllUser(APIView):\r\n\r\n def get(self,request):\r\n user_data = []\r\n if User.objects.all().exists():\r\n get_Dfuser_ins = User.objects.all().order_by('username')\r\n if get_Dfuser_ins:\r\n for item in get_Dfuser_ins:\r\n user_data.append(item.username)\r\n return Response({\"user_info\":user_data},status=200)\r\n\r\n"
},
{
"alpha_fraction": 0.49577465653419495,
"alphanum_fraction": 0.5225352048873901,
"avg_line_length": 23.35714340209961,
"blob_id": "67c6dec85f93605ca2538b3b264fe97dc79f6975",
"content_id": "c3a0b57da1e773305f1cb02133548fa56da679b5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 710,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 28,
"path": "/social_media_platforms/migrations/0002_auto_20200413_1243.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-13 07:13\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('social_media_platforms', '0001_initial'),\r\n ]\r\n\r\n operations = [\r\n migrations.RenameField(\r\n model_name='dfsocialmedia',\r\n old_name='connect_status',\r\n new_name='Connect_status',\r\n ),\r\n migrations.RenameField(\r\n model_name='dfsocialmedia',\r\n old_name='email',\r\n new_name='Email',\r\n ),\r\n migrations.RenameField(\r\n model_name='dfsocialmedia',\r\n old_name='username',\r\n new_name='Username',\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.7178770899772644,
"alphanum_fraction": 0.7178770899772644,
"avg_line_length": 33.79999923706055,
"blob_id": "46852f1d79d3dc019c04e67b45f580d4d7cc3bc5",
"content_id": "7c475eb4c22123d3aec9e51d4539bf73cae7588d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 716,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 20,
"path": "/manage_dropdown_value/serializear.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom django.contrib.auth.models import User\r\nfrom .models import DfBusinessCategory,DfCountry, DfState\r\nfrom django.contrib.auth import authenticate\r\nfrom rest_framework import exceptions\r\n\r\nclass DfBusinessCategorySerializers(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfBusinessCategory\r\n fields = ('id','Category_Name','Status')\r\n\r\nclass DfCountrySerializers(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfCountry\r\n fields = ('id','Country_Name','Status')\r\n\r\nclass DfStateSerializers(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfState\r\n fields = ('id','Country_Name','State_name','Status')\r\n"
},
{
"alpha_fraction": 0.7180868983268738,
"alphanum_fraction": 0.726844072341919,
"avg_line_length": 37.57143020629883,
"blob_id": "73fcf00a94e85000b05e1943210eeb8137eaa54d",
"content_id": "67a7f20e2273e56d61b1eaefa76175dcef6cbe2c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2969,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 77,
"path": "/manage_orders_and_payments/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom accounts.models import DfUser\nfrom manage_pricing.models import DfPrice\nfrom .utils import unique_id_generator_for_order_id_for_Df_order\nfrom django.db.models.signals import pre_save\nimport django\n\n# Create your models here.\n\n\nDURATION_CHOICES = (\n ('D','Days'),\n ('M', 'Month'),\n ('Y', 'Year'),\n)\n\n\nclass DfOrders(models.Model):\n Order_id = models.CharField(max_length=120, unique=True,null=True, blank=True)\n DfUser = models.ForeignKey(DfUser, on_delete=models.CASCADE,null=True, blank=True)\n Package = models.ForeignKey(DfPrice, on_delete=models.CASCADE,null=True, blank=True)\n Final_Amount = models.FloatField(default=0)\n Duration_Type = models.CharField(max_length=120,choices=DURATION_CHOICES,null=True, blank=True)\n Duration_Time = models.IntegerField(default=0)\n Create_Date = models.DateTimeField(default=django.utils.timezone.now)\n Payment = models.BooleanField(default=False)\n Payment_Type = models.CharField(max_length=120,null=True, blank=True)\n Transaction_id = models.CharField(max_length=120,null=True, blank=True)\n Payment_Date = models.DateTimeField(null=True, blank=True)\n Active = models.BooleanField(default=False)\n Start_Date = models.DateField(null=True, blank=True)\n End_Date = models.DateField(null=True, blank=True)\n\n\n def __str__(self):\n return self.Order_id\n\n class Meta:\n verbose_name_plural = \"DF Orders\"\n\n\ndef pre_save_create_Order_id(sender, instance, *args, **kwargs):\n if not instance.Order_id:\n instance.Order_id= unique_id_generator_for_order_id_for_Df_order(instance)\npre_save.connect(pre_save_create_Order_id, sender=DfOrders)\n\n\n\n\nclass DfOrdersAndPayment(models.Model):\n Order_id = models.CharField(max_length=120, unique=True,null=True, blank=True)\n DfUser = models.ForeignKey(DfUser, on_delete=models.CASCADE)\n Package = models.ForeignKey(DfPrice, on_delete=models.CASCADE)\n Final_Amount = models.FloatField()\n Duration_Type = 
models.CharField(max_length=120,choices=DURATION_CHOICES)\n Duration_Time = models.IntegerField()\n Create_Date = models.DateTimeField(default=django.utils.timezone.now)\n Payment = models.BooleanField(default=False)\n Payment_Type = models.CharField(max_length=120,null=True, blank=True)\n Transaction_id = models.CharField(max_length=120,null=True, blank=True)\n Payment_Date = models.DateTimeField(null=True, blank=True)\n Active = models.BooleanField(default=False)\n Start_Date = models.DateField(null=True, blank=True)\n End_Date = models.DateField(null=True, blank=True)\n\n\n def __str__(self):\n return self.Order_id\n\n class Meta:\n verbose_name_plural = \"DF Orders And Payment\"\n\n\ndef pre_save_create_Order_id(sender, instance, *args, **kwargs):\n if not instance.Order_id:\n instance.Order_id= unique_id_generator_for_order_id_for_Df_order(instance)\npre_save.connect(pre_save_create_Order_id, sender=DfOrdersAndPayment)"
},
{
"alpha_fraction": 0.5472155213356018,
"alphanum_fraction": 0.6004842519760132,
"avg_line_length": 21.94444465637207,
"blob_id": "4871a977461331b440f0e822b5b58a5013f48050",
"content_id": "728aa8b8dfebc10835133a9cd71b8f5f025f9fa4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 413,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 18,
"path": "/manage_campus/migrations/0008_dfuseremail_contact.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-15 11:36\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_campus', '0007_dfuseremail'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dfuseremail',\n name='Contact',\n field=models.CharField(blank=True, max_length=500, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7446362972259521,
"alphanum_fraction": 0.7446362972259521,
"avg_line_length": 61.766666412353516,
"blob_id": "06a79c562d7a4109537f754108460e27d32afbcc",
"content_id": "c2793dc127e72afcf70950a8427e1d0cf494e139",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1911,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 30,
"path": "/manage_locations/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\n\r\n\r\nurlpatterns = [\r\n path('add-location', views.AddLocationView.as_view()),\r\n path('get-all-locations', views.GetAllLocationView.as_view()),\r\n path('get-location-by-id', views.GetLocationByIdView.as_view()),\r\n path('remove-location-by-id', views.RemoveLocationByIdView.as_view()),\r\n path('edit-Location-Business-by-id', views.EditLocationBusinessView.as_view()),\r\n path('edit-Location-operations-hours-by-id', views.EditLocationHoursView.as_view()),\r\n path('edit-Location-payment-method-by-id', views.EditLocationPaymentMethodView.as_view()),\r\n path('location-connect-with-social-media', views.LocationConnectWithSocialSedia.as_view()),\r\n path('location-connect-remove-with-social-media', views.LocationConnectRemoveWithSocialSedia.as_view()),\r\n path('get-all-connection-of-one-location', views.GetAllConnectionOfOneLocation.as_view()),\r\n path('get-all-connection-of-business-locationn-to-platfrom', views.GetAllConnectionOfBusinessLocationnToPlatfrom.as_view()),\r\n path('update-images-files-by-location-id', views.UpdateImagesFilesByLocationIdView.as_view()),\r\n\r\n path('add-other-images-files-by-location-id', views.AddOtherImagesFilesByLocationIdView.as_view()),\r\n\r\n path('update-other-images-files-by-location-id-image-id', views.UpdateOtherImagesFilesByLocationIdImageIdView.as_view()),\r\n\r\n path('remove-other-images-files-by-location-id-image-id', views.RemoveOtherImagesFilesByLocationIdImageIdView.as_view()),\r\n path('remove-all-other-images-files-by-location-id', views.RemoveAllOtherImagesFilesByLocationIdView.as_view()),\r\n\r\n path('get-open-hours-by-location-id', views.GetOpenHourByLocationIdView.as_view()),\r\n # path('register', views.UserRegistration.as_view()),\r\n # path('login', views.LoginView.as_view(),name=\"login\"),\r\n # path('logout', views.LogoutView.as_view(),name=\"Logout\"),\r\n]"
},
{
"alpha_fraction": 0.7225950956344604,
"alphanum_fraction": 0.7240865230560303,
"avg_line_length": 51.720001220703125,
"blob_id": "aa058b881218954d8377c777b4208fc6aa4429c6",
"content_id": "8d79c0933d9b39abebc64bc7de6f79d82f344ada",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1341,
"license_type": "no_license",
"max_line_length": 206,
"num_lines": 25,
"path": "/manage_orders_and_payments/serializers.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom .models import DfOrders,DfOrdersAndPayment\r\nfrom manage_pricing.models import DfPrice\r\nfrom accounts.models import DfUser\r\n\r\n\r\n\r\nclass DfOrderSerializer(serializers.ModelSerializer):\r\n # Job_DfJobs = DfJobsSerializer(read_only=True,many=True)\r\n DfUser = serializers.PrimaryKeyRelatedField(many=False,queryset=DfUser.objects.all())\r\n Package = serializers.PrimaryKeyRelatedField(many=False,queryset=DfPrice.objects.all())\r\n class Meta:\r\n model = DfOrders\r\n fields = ['id', 'Order_id', 'DfUser','Package','Final_Amount','Duration_Type','Duration_Time','Create_Date','Payment','Payment_Type','Transaction_id','Payment_Date','Active','Start_Date','End_Date']\r\n depth = 2\r\n\r\n\r\nclass DfOrdersAndPaymentSerializer(serializers.ModelSerializer):\r\n # Job_DfJobs = DfJobsSerializer(read_only=True,many=True)\r\n DfUser = serializers.PrimaryKeyRelatedField(many=False,queryset=DfUser.objects.all())\r\n Package = serializers.PrimaryKeyRelatedField(many=False,queryset=DfPrice.objects.all())\r\n class Meta:\r\n model = DfOrdersAndPayment\r\n fields = ['id', 'Order_id', 'DfUser','Package','Final_Amount','Duration_Type','Duration_Time','Create_Date','Payment','Payment_Type','Transaction_id','Payment_Date','Active','Start_Date','End_Date']\r\n depth = 2"
},
{
"alpha_fraction": 0.8105849623680115,
"alphanum_fraction": 0.8105849623680115,
"avg_line_length": 35,
"blob_id": "9a8b861cc7b8358473d63654326fcddbb9d285fb",
"content_id": "1e0052c0ae16958341c0ce6a14a6387289684ba0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 359,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 10,
"path": "/manage_faqs/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom import_export.admin import ImportExportModelAdmin\nfrom .models import DfFaqs,DfFaqCategory\n# Register your models here.\n\nadmin.site.register(DfFaqCategory)\n\nclass DfFaqsAdmin(ImportExportModelAdmin):\n list_display = ('Question','Category','Question_slug','Ansews','Create_date')\nadmin.site.register(DfFaqs,DfFaqsAdmin)"
},
{
"alpha_fraction": 0.6964344382286072,
"alphanum_fraction": 0.7003598213195801,
"avg_line_length": 52.55356979370117,
"blob_id": "70575caccd37779ce6d7ce0e557a76f8b2b0f2cc",
"content_id": "a56bbf5827fe8131cc1d8e03ef641ad135a4d7c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3057,
"license_type": "no_license",
"max_line_length": 219,
"num_lines": 56,
"path": "/reviews/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\r\nfrom django.shortcuts import render,get_object_or_404\r\nfrom rest_framework.views import APIView\r\nfrom rest_framework.decorators import api_view\r\nfrom rest_framework.response import Response\r\nfrom rest_framework.authentication import TokenAuthentication,BasicAuthentication\r\nfrom dashifyproject.tokens import CsrfExemptSessionAuthentication\r\nfrom rest_framework.permissions import IsAuthenticated\r\nfrom accounts.models import DfUser\r\nfrom .serializear import SaveReviewsSerializers,GetReviewsSerializers,GetLocationserializer\r\nfrom rest_framework import exceptions\r\nfrom .models import DfLocationReviews\r\nfrom django.db.models import Count\r\nfrom rest_framework import generics\r\n# Create your views here.\r\n\r\nclass SaveReviewsView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self,request):\r\n message = \"\"\r\n if request.method == \"POST\":\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = SaveReviewsSerializers(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n message = serializer.validated_data\r\n return Response({\"message\": message}, status=200)\r\n\r\n\r\nclass GetAllReviewView(APIView):\r\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def post(self,request):\r\n reviews = {}\r\n request.data[\"user_id\"] = self.request.user.id\r\n serializer = GetLocationserializer(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n location_ins = serializer.validated_data\r\n if DfUser.objects.filter(user=self.request.user).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user=self.request.user)\r\n if DfLocationReviews.objects.filter(Df_User=get_Dfuser_ins).filter(Business_Location=location_ins).exists():\r\n 
get_social_platfrom = DfLocationReviews.objects.values_list('Social_Plateform', flat=True).annotate(dcount=Count('Social_Plateform')).filter(Df_User=get_Dfuser_ins).filter(Business_Location=location_ins)\r\n if get_social_platfrom is not None:\r\n for item in get_social_platfrom:\r\n get_all_review = DfLocationReviews.objects.filter(Df_User=get_Dfuser_ins).filter(Business_Location=location_ins).filter(Social_Plateform=item).order_by('-id')\r\n # get_all_review_sri = GetReviewsSerializers(get_all_review, many=True , context={\"request\":request})\r\n get_all_review_sri = GetReviewsSerializers(get_all_review, many=True)\r\n get_data = get_all_review_sri.data\r\n reviews[item] = get_data\r\n\r\n else:\r\n msg = \"Login User is not exists\"\r\n raise exceptions.ValidationError(msg)\r\n return Response({\"reviews\": reviews}, status=200)\r\n\r\n"
},
{
"alpha_fraction": 0.5603448152542114,
"alphanum_fraction": 0.6413792967796326,
"avg_line_length": 28,
"blob_id": "134b329cb7a273feba17d997f57970c40e03ba8c",
"content_id": "85feeb5f12dd032dd9b419ebb7cb5340c44623a7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 580,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 20,
"path": "/manage_orders_and_payments/migrations/0003_auto_20200922_1154.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 11:54\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0004_auto_20200922_1146'),\n ('manage_orders_and_payments', '0002_auto_20200922_1152'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dforders',\n name='Package',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_pricing.DfPrice'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.80633145570755,
"alphanum_fraction": 0.80633145570755,
"avg_line_length": 34.79999923706055,
"blob_id": "527cf33eaf179762d9260234fa54b9723dbfff4e",
"content_id": "7084cbe60ad1c4fb90cea67704cfbcb2b333f3c4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 537,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 15,
"path": "/manage_bloges/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from .models import DfBlogs\nfrom .serializers import DfBlogsSerializer\nfrom rest_framework import generics\nfrom .api_pagination import ProductLimitOffsetPagination , PrtoductPageNumberPagination\n# Create your views here.\n\n\nclass GetBloges(generics.ListCreateAPIView):\n queryset = DfBlogs.objects.all().order_by(\"-id\")\n serializer_class = DfBlogsSerializer\n agination_class = PrtoductPageNumberPagination\n\nclass GetOneBloge(generics.RetrieveAPIView):\n queryset = DfBlogs.objects.all()\n serializer_class = DfBlogsSerializer\n"
},
{
"alpha_fraction": 0.7929736375808716,
"alphanum_fraction": 0.7929736375808716,
"avg_line_length": 32.20833206176758,
"blob_id": "15dba3ccdc036971e8e37ba0dd289c339aee1678",
"content_id": "2f32a960081f4e4bcb62d84e590392f4562425d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 797,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 24,
"path": "/manage_pricing/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from .models import DfPrice\nfrom .serializers import DfPriceSerializer\nfrom rest_framework import generics,viewsets\nfrom .api_pagination import ProductLimitOffsetPagination , PrtoductPageNumberPagination\n# Create your views here.\n\n\nclass GetPriceList(generics.ListCreateAPIView):\n queryset = DfPrice.objects.all().order_by(\"Orders_set\")\n serializer_class = DfPriceSerializer\n agination_class = PrtoductPageNumberPagination\n\n\n# class AddJobsApplication(generics.ListCreateAPIView):\n# queryset = DfJobApplaicationSet.objects.all().order_by(\"-id\")\n# serializer_class = DfJobsApplicationSerializer\n# agination_class = PrtoductPageNumberPagination\n#\n#\n#\n#\nclass GetOnePackage(generics.RetrieveAPIView):\n queryset = DfPrice.objects.all()\n serializer_class = DfPriceSerializer\n"
},
{
"alpha_fraction": 0.6755555272102356,
"alphanum_fraction": 0.6755555272102356,
"avg_line_length": 23.22222137451172,
"blob_id": "e85e3539d234261849c27505f9978788562a71c4",
"content_id": "f4f91354a4e9dd1be35565d97224019185ec0823",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 225,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 9,
"path": "/manage_faqs/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\n\r\nurlpatterns = [\r\n path('', views.GetFaqs.as_view()),\r\n path('one-faq/<int:pk>', views.GetOneFaq.as_view()),\r\n]"
},
{
"alpha_fraction": 0.6677631735801697,
"alphanum_fraction": 0.6710526347160339,
"avg_line_length": 32,
"blob_id": "f91fb4e4929c41e83c5956eb8dc47cc5758c2f08",
"content_id": "f720077528ac3285ebe924432426317ec5f8b075",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 304,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 9,
"path": "/manage_pricing/serializers.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom .models import DfPrice\r\n\r\n\r\nclass DfPriceSerializer(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfPrice\r\n fields = ['id', 'Package_Type', 'Price','Duration_Type','Duration_time','Start','Orders_set','Create_Date']\r\n depth = 2"
},
{
"alpha_fraction": 0.5353454947471619,
"alphanum_fraction": 0.5901508927345276,
"avg_line_length": 33.97222137451172,
"blob_id": "3360e5a4f0bd237b34e2a5cfd9b53ee2697cf99e",
"content_id": "58673963638c157e1956236cec3c9fe842c58843",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1259,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 36,
"path": "/manage_orders_and_payments/migrations/0005_auto_20200922_1434.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 14:34\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0011_auto_20200922_1434'),\n ('accounts', '0008_auto_20200821_1621'),\n ('manage_orders_and_payments', '0004_auto_20200922_1156'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dforders',\n name='DfUser',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='accounts.DfUser'),\n ),\n migrations.AlterField(\n model_name='dforders',\n name='Duration_Type',\n field=models.CharField(choices=[('D', 'Days'), ('M', 'Month'), ('Y', 'Year')], max_length=120, null=True),\n ),\n migrations.AlterField(\n model_name='dforders',\n name='Order_id',\n field=models.CharField(blank=True, max_length=120, null=True, unique=True),\n ),\n migrations.AlterField(\n model_name='dforders',\n name='Package',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='manage_pricing.DfPrice'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5747303366661072,
"alphanum_fraction": 0.6032357215881348,
"avg_line_length": 38.33333206176758,
"blob_id": "a53343d55ff15c79fffb188ed35ea7424ad79c0a",
"content_id": "ac09d5770cee9e0e42f970f1288530891d67129f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1298,
"license_type": "no_license",
"max_line_length": 161,
"num_lines": 33,
"path": "/manage_jobs/migrations/0004_auto_20200919_1229.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-19 12:29\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_jobs', '0003_auto_20200919_1225'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfJobApplaicationSet',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Name', models.CharField(max_length=120)),\n ('email', models.EmailField(max_length=120)),\n ('contact_no', models.BigIntegerField()),\n ('Application_Date', models.DateTimeField(default=django.utils.timezone.now)),\n ('Job_title', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='Job_DfJobs', to='manage_jobs.DfJobs')),\n ('job_cate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='AwWineType_DfApplay', to='manage_jobs.DfJobCategory')),\n ],\n options={\n 'verbose_name_plural': 'DF Job Applaication',\n },\n ),\n migrations.DeleteModel(\n name='DfJobApplaication',\n ),\n ]\n"
},
{
"alpha_fraction": 0.7699999809265137,
"alphanum_fraction": 0.7699999809265137,
"avg_line_length": 19,
"blob_id": "e8fdc336619215b3bb85d43a332e7a4e401554e3",
"content_id": "6f3aefbdc5b152b885e118a4432058a698219d92",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 100,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 5,
"path": "/manage_bloges/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass ManageBlogesConfig(AppConfig):\n name = 'manage_bloges'\n"
},
{
"alpha_fraction": 0.5645161271095276,
"alphanum_fraction": 0.6152073740959167,
"avg_line_length": 23.11111068725586,
"blob_id": "19e519e4fd68b4dca8b96ddcedab9c8460d312ae",
"content_id": "9492bc44c6c560943d2aa3555b00517da664998c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 434,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 18,
"path": "/manage_locations/migrations/0012_dflocationopenhours_end_time_2.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-16 10:51\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0011_dflocationopenhours'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dflocationopenhours',\n name='end_time_2',\n field=models.CharField(blank=True, max_length=20, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.6957672238349915,
"alphanum_fraction": 0.6957672238349915,
"avg_line_length": 32.54545593261719,
"blob_id": "18ac50f4b3341bdaa9409303f038a04411d1bee8",
"content_id": "1f18a720a7981ca231ffcbc8510e14e52cba8506",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 378,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 11,
"path": "/manage_orders_and_payments/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\n\r\nurlpatterns = [\r\n path('create-new-order', views.CreateNewOrder.as_view()),\r\n path('update-order/<int:pk>', views.UpdateOrder.as_view()),\r\n path('order-list', views.GetOrderList.as_view()),\r\n # path('job-application', views.AddJobsApplication.as_view()),\r\n]"
},
{
"alpha_fraction": 0.513853907585144,
"alphanum_fraction": 0.5919395685195923,
"avg_line_length": 21.05555534362793,
"blob_id": "04464c7ed039208ef689a9940d6b764d261e9cf0",
"content_id": "4b7b0d25876a65e2f39ed6756305cd9595b9096f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 397,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 18,
"path": "/accounts/migrations/0008_auto_20200821_1621.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-21 16:21\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('accounts', '0007_auto_20200806_1201'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfuser',\n name='Phone',\n field=models.IntegerField(blank=True, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7581602334976196,
"alphanum_fraction": 0.7581602334976196,
"avg_line_length": 41.1875,
"blob_id": "1a562d5ec124b4c945206b8cb00ee109080a0d6a",
"content_id": "356a016a8222a3e3c9c5ec2805224340d9aeebab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 674,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 16,
"path": "/manage_campus/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom import_export.admin import ImportExportModelAdmin\nfrom .models import DfCampaign,DfUseremail\n# Register your models here.\n\nclass DfCampaignAdmin(ImportExportModelAdmin):\n list_display = ('DfUser','BusinessLocation','Title','Head','Subject','Sent_from','replay_to','message','sms_message','Create_date')\n list_filter = ('DfUser','Create_date',)\nadmin.site.register(DfCampaign,DfCampaignAdmin)\n\n\n\nclass DfUseremailAdmin(ImportExportModelAdmin):\n list_display = ('DfUser','Campign','Email','Name','mail_sent_status','Sent_date')\n list_filter = ('DfUser','Campign','Sent_date',)\nadmin.site.register(DfUseremail,DfUseremailAdmin)"
},
{
"alpha_fraction": 0.5266579985618591,
"alphanum_fraction": 0.5708712339401245,
"avg_line_length": 26.464284896850586,
"blob_id": "4bb9533b2136f84bff87b9839951e5c17977ab5f",
"content_id": "6e0aa3652f958fb9a44ee065a5f29bdc54874808",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 769,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 28,
"path": "/manage_orders_and_payments/migrations/0004_auto_20200922_1156.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 11:56\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_orders_and_payments', '0003_auto_20200922_1154'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dforders',\n name='End_Date',\n field=models.DateField(blank=True, null=True),\n ),\n migrations.AlterField(\n model_name='dforders',\n name='Payment_Type',\n field=models.CharField(blank=True, max_length=120, null=True),\n ),\n migrations.AlterField(\n model_name='dforders',\n name='Start_Date',\n field=models.DateField(blank=True, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5658174157142639,
"alphanum_fraction": 0.6008492708206177,
"avg_line_length": 35.68000030517578,
"blob_id": "4fcddbcefddffa21e3911dff71934872a97280fd",
"content_id": "e478ac170165865df51327646a85491ee544f857",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 942,
"license_type": "no_license",
"max_line_length": 205,
"num_lines": 25,
"path": "/manage_locations/migrations/0007_dflocationpaymentmethod.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-11 10:50\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('manage_locations', '0006_auto_20200411_1556'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='DfLocationPaymentMethod',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Payment_Method', models.CharField(blank=True, max_length=20, null=True)),\r\n ('Business_Location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Df_location_payments', to='manage_locations.DfBusinessLocation')),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF Location Payment Method',\r\n },\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.5027124881744385,
"alphanum_fraction": 0.5696202516555786,
"avg_line_length": 23.04347801208496,
"blob_id": "9b90732cd1bf50c3ec3c6986ef99a07a67430205",
"content_id": "caa175c60e5b6dc4ae0bc26cc259d26e85409b87",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 553,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 23,
"path": "/queryes/migrations/0003_auto_20200905_0827.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-05 08:27\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('queryes', '0002_auto_20200905_0821'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfqueryinfo',\n name='Name',\n field=models.CharField(max_length=120),\n ),\n migrations.AlterField(\n model_name='dfqueryinfo',\n name='Your_Email',\n field=models.CharField(max_length=120),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5070754885673523,
"alphanum_fraction": 0.5801886916160583,
"avg_line_length": 21.55555534362793,
"blob_id": "ac7207aebcace6793ae2f5ecdfb3a551589cb46e",
"content_id": "1dfa998b6fafad3e32c9ae83029f3670b110607c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 424,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 18,
"path": "/manage_locations/migrations/0005_auto_20200410_1800.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-10 12:30\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('manage_locations', '0004_auto_20200410_1720'),\r\n ]\r\n\r\n operations = [\r\n migrations.RenameField(\r\n model_name='dfbusinesslocation',\r\n old_name='Business_catugory',\r\n new_name='Business_category',\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.8091133236885071,
"alphanum_fraction": 0.8091133236885071,
"avg_line_length": 32.83333206176758,
"blob_id": "3eca36dc4523841296e2514412842814157bc1d2",
"content_id": "f093d9c5aebe372f275d3dc71881d640ce270248",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 812,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 24,
"path": "/manage_jobs/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from .models import DfJobs,DfJobApplaicationSet\nfrom .serializers import DfJobsSerializer,DfJobsApplicationSerializer\nfrom rest_framework import generics,viewsets\nfrom .api_pagination import ProductLimitOffsetPagination , PrtoductPageNumberPagination\n# Create your views here.\n\n\nclass GetJobs(generics.ListCreateAPIView):\n queryset = DfJobs.objects.all().order_by(\"-id\")\n serializer_class = DfJobsSerializer\n agination_class = PrtoductPageNumberPagination\n\n\nclass AddJobsApplication(generics.ListCreateAPIView):\n queryset = DfJobApplaicationSet.objects.all().order_by(\"-id\")\n serializer_class = DfJobsApplicationSerializer\n agination_class = PrtoductPageNumberPagination\n\n\n\n\nclass GetOneJob(generics.RetrieveAPIView):\n queryset = DfJobs.objects.all()\n serializer_class = DfJobsSerializer\n"
},
{
"alpha_fraction": 0.5414141416549683,
"alphanum_fraction": 0.5787878632545471,
"avg_line_length": 32,
"blob_id": "94f6a406b49b8a01c89406927e803bb141b01655",
"content_id": "c5122b10f91b2c4cedfd14d4ad4ed849ea62ad6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 990,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 30,
"path": "/manage_pricing/migrations/0005_auto_20200922_1205.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 12:05\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0004_auto_20200922_1146'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfPackageName',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=120, unique=True)),\n ('keyword', models.CharField(max_length=120, unique=True)),\n ],\n options={\n 'verbose_name_plural': 'DF Package',\n },\n ),\n migrations.AlterField(\n model_name='dfprice',\n name='Package_Type',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_pricing.DfPackageName'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.578635036945343,
"alphanum_fraction": 0.5910979509353638,
"avg_line_length": 39.119049072265625,
"blob_id": "f92fc372b92491d77c476832b7e789f90d7d4534",
"content_id": "ea1183040479bb433bc44986ff0e488e7d14ca68",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1685,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 42,
"path": "/manage_jobs/migrations/0001_initial.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-10 11:21\n\nimport autoslug.fields\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfJobCategory',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('CategoryName', models.CharField(max_length=120, unique=True)),\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ],\n options={\n 'verbose_name_plural': 'DF Job Category',\n },\n ),\n migrations.CreateModel(\n name='DfJobs',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Job_Title', models.CharField(max_length=120)),\n ('Job_slug', autoslug.fields.AutoSlugField(always_update=True, blank=True, editable=False, null=True, populate_from='Job_Title', unique_with=('Create_date__month',))),\n ('Job_Description', models.TextField()),\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ('Category_name', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='AwWineType_DfJobs', to='manage_jobs.DfJobCategory')),\n ],\n options={\n 'verbose_name_plural': 'DF Job Category',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.7651556730270386,
"alphanum_fraction": 0.7673402428627014,
"avg_line_length": 53.54545593261719,
"blob_id": "056430a90f1b0e097e5d86bb83dfec2510e9cbd0",
"content_id": "09155d542f4be0de48b69fb1590bc4235babcc7c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1831,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 33,
"path": "/manage_locations/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\r\nfrom import_export.admin import ImportExportModelAdmin\r\nfrom .models import DfBusinessLocation,DfLocationImage,DfLocationPaymentMethod,DfLocationConnectPlatform,DfLocationOpenHours\r\n# Register your models here.\r\n\r\nclass DfBusinessLocationAdmin(ImportExportModelAdmin):\r\n search_fields = ['Store_Code']\r\n list_display = ('Store_Code','Business_category','Country','State','City','Zipcode','Phone_no','Business_Owner_Name','Craete_Date')\r\n list_filter = ('Location_name','Business_category','Country','State','City','Zipcode','Craete_Date',)\r\n\r\nclass DfLocationImageAdmin(ImportExportModelAdmin):\r\n search_fields = ['Business_Location']\r\n list_display = ('Business_Location','Image','Craete_Date')\r\n list_filter = ('Craete_Date',)\r\n\r\nclass DfLocationPaymentMethodAdmin(ImportExportModelAdmin):\r\n search_fields = ['Business_Location']\r\n list_display = ('Business_Location','Payment_Method')\r\n\r\nclass DfLocationConnectPlatforAdmin(ImportExportModelAdmin):\r\n list_display = ('DfUser','Business_Location','Social_Platform','Connection_Status','Craete_Date','Update_Date')\r\n list_filter = ('DfUser','Business_Location','Social_Platform','Connection_Status','Craete_Date',)\r\n\r\nclass DfLocationOpenHoursAdmin(ImportExportModelAdmin):\r\n list_display = ('Business_Location','date','Day','Type','Open_status','start_time_1','end_time_1','start_time_2','end_time_2')\r\n list_filter = ('Type','Open_status','Business_Location',)\r\n\r\nadmin.site.register(DfBusinessLocation,DfBusinessLocationAdmin)\r\nadmin.site.register(DfLocationImage,DfLocationImageAdmin)\r\nadmin.site.register(DfLocationPaymentMethod,DfLocationPaymentMethodAdmin)\r\nadmin.site.register(DfLocationConnectPlatform,DfLocationConnectPlatforAdmin)\r\n\r\nadmin.site.register(DfLocationOpenHours,DfLocationOpenHoursAdmin)"
},
{
"alpha_fraction": 0.5781893134117126,
"alphanum_fraction": 0.609739363193512,
"avg_line_length": 44.5625,
"blob_id": "b80993af27871e50f9605c2e911e1c465eaab1cc",
"content_id": "21e3a2ea1cfea22c67f805f0dfa8ab3d8de7b05f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1458,
"license_type": "no_license",
"max_line_length": 207,
"num_lines": 32,
"path": "/manage_locations/migrations/0011_dflocationopenhours.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-16 09:25\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0010_auto_20200416_0910'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfLocationOpenHours',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Day', models.CharField(blank=True, max_length=20, null=True)),\n ('Type', models.CharField(blank=True, max_length=20, null=True)),\n ('Open_status', models.CharField(blank=True, max_length=20, null=True)),\n ('start_time_1', models.CharField(blank=True, max_length=20, null=True)),\n ('end_time_1', models.CharField(blank=True, max_length=20, null=True)),\n ('start_time_2', models.CharField(blank=True, max_length=20, null=True)),\n ('Update_Date', models.DateTimeField(default=django.utils.timezone.now)),\n ('Business_Location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Df_location_poen_houre', to='manage_locations.DfBusinessLocation')),\n ],\n options={\n 'verbose_name_plural': 'DF Location Open Hours',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.5864839553833008,
"alphanum_fraction": 0.5902646780014038,
"avg_line_length": 45.57303237915039,
"blob_id": "57eb5674cc6d092db6a736950a9ceca4796eecff",
"content_id": "7345f180c90571d7d7d4856bbbb2385d5e3c35b4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4232,
"license_type": "no_license",
"max_line_length": 123,
"num_lines": 89,
"path": "/reviews/serializear.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom django.shortcuts import get_object_or_404\r\nfrom rest_framework import exceptions\r\nfrom accounts.models import DfUser\r\nfrom .models import DfLocationReviews\r\nfrom manage_locations.models import DfBusinessLocation\r\n\r\n\r\n\r\n\r\nclass SaveReviewsSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Social_Plateform = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n User_Name = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Reating = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Review = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n User_Image_URL = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n Review_dateTime = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n Location_id = data.get(\"Location_id\", \"\")\r\n Social_Plateform = data.get(\"Social_Plateform\", \"\")\r\n User_Name = data.get(\"User_Name\", \"\")\r\n Reating = data.get(\"Reating\", \"\")\r\n Review = data.get(\"Review\", \"\")\r\n User_Image_URL = data.get(\"User_Image_URL\", \"\")\r\n Review_dateTime = data.get(\"Review_dateTime\", \"\")\r\n message = \"\"\r\n if Location_id:\r\n if DfBusinessLocation.objects.filter(id=Location_id).exists():\r\n get_bl_ins = get_object_or_404(DfBusinessLocation,id=Location_id)\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if get_bl_ins.DfUser.id == get_Dfuser_ins.id:\r\n add_review = DfLocationReviews(\r\n Df_User = get_Dfuser_ins,\r\n Business_Location = get_bl_ins,\r\n Social_Plateform = 
Social_Plateform,\r\n User_Name = User_Name,\r\n Reating = Reating,\r\n Review = Review,\r\n User_Image_URL = User_Image_URL,\r\n Review_dateTime = Review_dateTime\r\n )\r\n add_review.save()\r\n message = \"Review inserted successfully.\"\r\n else:\r\n msg = \"This Location_id is not related to current login user.\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n msg = \"Location_id is not exists.\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n mes = \"Must provide location_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return message\r\n\r\n\r\n\r\n\r\nclass GetLocationserializer(serializers.Serializer):\r\n Location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n Location_id = data.get(\"Location_id\", \"\")\r\n user_id = data.get(\"user_id\", \"\")\r\n location_ins = None\r\n if DfBusinessLocation.objects.filter(id=Location_id).exists():\r\n get_bl_ins = get_object_or_404(DfBusinessLocation, id=Location_id)\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if get_bl_ins.DfUser.id == get_Dfuser_ins.id:\r\n location_ins = get_bl_ins\r\n else:\r\n msg = \"This Location_id is not related to current login user.\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n msg = \"Location_id is not exists.\"\r\n raise exceptions.ValidationError(msg)\r\n return location_ins\r\n\r\n\r\nclass GetReviewsSerializers(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfLocationReviews\r\n fields = ['id', 'Social_Plateform', 'User_Name', 'Reating', 'Review', 'User_Image_URL',\r\n 'Review_dateTime', 'Craete_Date','Business_Location','Df_User']\r\n depth = 2"
},
{
"alpha_fraction": 0.7034482955932617,
"alphanum_fraction": 0.7083743810653687,
"avg_line_length": 38.07692337036133,
"blob_id": "6c3664a645a9b9812eff1f72492d9f43345ab08d",
"content_id": "aa76f5c75db887a159248281e69d3da378c42657",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1015,
"license_type": "no_license",
"max_line_length": 149,
"num_lines": 26,
"path": "/manage_bloges/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\nimport django\nfrom autoslug import AutoSlugField\nfrom datetime import date\n# Create your models here.\n\n\ndef user_directory_path_for_banner(instance, filename):\n project_id_in_list = instance.Blog_Title.split(\" \")\n today_date = date.today()\n project_id_in_string = '_'.join([str(elem) for elem in project_id_in_list])\n return '{0}/{1}'.format(\"blogs/\"+project_id_in_string+\"/banner/\"+str(today_date.year)+\"/\"+str(today_date.month)+\"/\"+str(today_date.day),filename)\n\n\nclass DfBlogs(models.Model):\n Blog_Title = models.CharField(max_length=120)\n Blog_slug = AutoSlugField(populate_from='Blog_Title', always_update=True,unique_with='Create_date__month',null=True, blank=True)\n Blog_Image = models.ImageField(upload_to=user_directory_path_for_banner)\n Message = models.TextField()\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\n\n def __str__(self):\n return self.Blog_Title\n\n class Meta:\n verbose_name_plural = \"DF Blogs\""
},
{
"alpha_fraction": 0.6043689250946045,
"alphanum_fraction": 0.6365291476249695,
"avg_line_length": 49.5,
"blob_id": "7f97b882cb4e7ae2f7a3af597e9c97f5be0b8ca4",
"content_id": "0edf3733a5067f7587883b124dc309b7ea0d9426",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1648,
"license_type": "no_license",
"max_line_length": 208,
"num_lines": 32,
"path": "/manage_locations/migrations/0008_dflocationconnectplatfor.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-13 10:16\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport django.utils.timezone\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('social_media_platforms', '0002_auto_20200413_1243'),\r\n ('accounts', '0005_auto_20200410_1503'),\r\n ('manage_locations', '0007_dflocationpaymentmethod'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='DfLocationConnectPlatfor',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Connection_Status', models.CharField(blank=True, max_length=20, null=True)),\r\n ('Craete_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Update_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Business_Location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Df_location_connectWith', to='manage_locations.DfBusinessLocation')),\r\n ('DfUser', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\r\n ('Social_Platform', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Df_location_connectWith', to='social_media_platforms.DfSocialMedia')),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF Business_Location Connect With Social Media',\r\n },\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.7260273694992065,
"alphanum_fraction": 0.7260273694992065,
"avg_line_length": 38.880001068115234,
"blob_id": "f208beeecf8861befe628878d4eab929d2a8bf58",
"content_id": "8483760cd474b7cbebbd9371bd1b11726ceb8971",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1022,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 25,
"path": "/manage_dropdown_value/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\r\nfrom .models import DfBusinessCategory,DfCountry,DfState\r\nfrom import_export.admin import ImportExportModelAdmin\r\n# Register your models here.\r\n\r\nclass DfBusinessCategoryAdmin(ImportExportModelAdmin):\r\n search_fields = ['Category_Name']\r\n list_display = ('Category_Name','Status','Create_by','Create_date')\r\n list_filter = ('Status','Create_by','Create_date',)\r\n\r\nclass DfCountryAdmin(ImportExportModelAdmin):\r\n search_fields = ['Country_Name']\r\n list_display = ('Country_Name','Status','Create_by','Create_date')\r\n list_filter = ('Status','Create_by','Create_date',)\r\n\r\nclass DfStateAdmin(ImportExportModelAdmin):\r\n search_fields = ['State_name']\r\n list_display = ('State_name','Country_Name','Status','Create_by','Create_date')\r\n list_filter = ('Country_Name','Status','Create_by','Create_date',)\r\n\r\n\r\n\r\nadmin.site.register(DfBusinessCategory,DfBusinessCategoryAdmin)\r\nadmin.site.register(DfCountry,DfCountryAdmin)\r\nadmin.site.register(DfState,DfStateAdmin)\r\n"
},
{
"alpha_fraction": 0.5087719559669495,
"alphanum_fraction": 0.6081871390342712,
"avg_line_length": 25,
"blob_id": "a08fe5d1195abf72c8a9d8647c89ff6a888ada76",
"content_id": "cb9d166017f6cb450800b3007952dddf8d80116b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 513,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 19,
"path": "/manage_locations/migrations/0009_auto_20200413_1648.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-13 11:18\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('accounts', '0005_auto_20200410_1503'),\r\n ('social_media_platforms', '0002_auto_20200413_1243'),\r\n ('manage_locations', '0008_dflocationconnectplatfor'),\r\n ]\r\n\r\n operations = [\r\n migrations.RenameModel(\r\n old_name='DfLocationConnectPlatfor',\r\n new_name='DfLocationConnectPlatform',\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.5127795338630676,
"alphanum_fraction": 0.5670926570892334,
"avg_line_length": 27.454545974731445,
"blob_id": "7aeacd26020fa7d6fb1e14554413be24552b24a6",
"content_id": "55be693dd22f4d2312a2ab45d27d3237532136c1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 626,
"license_type": "no_license",
"max_line_length": 169,
"num_lines": 22,
"path": "/manage_pricing/migrations/0009_auto_20200922_1216.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 12:16\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0008_auto_20200922_1211'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='dfprice',\n name='PackageType',\n ),\n migrations.AddField(\n model_name='dfprice',\n name='Package_Type',\n field=models.CharField(blank=True, choices=[('S', 'Start'), ('B', 'Business'), ('P', 'Professional'), ('M', 'Max')], max_length=120, null=True, unique=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7757009267807007,
"alphanum_fraction": 0.7757009267807007,
"avg_line_length": 20.399999618530273,
"blob_id": "9348463fd9b2fe3ff614e3808f4ffbf6e2771f29",
"content_id": "99e3a9bf2ba7c809765c902a6f579115b8aea373",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 107,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 5,
"path": "/manage_voice_faqs/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass ManageVoiceFaqsConfig(AppConfig):\n name = 'manage_voice_faqs'\n"
},
{
"alpha_fraction": 0.7584905624389648,
"alphanum_fraction": 0.7584905624389648,
"avg_line_length": 45.818180084228516,
"blob_id": "c1f0ba0a91495ed7c1ada5db2b1394672774ecb3",
"content_id": "e652cc19151a35e0b260b01a58b024c8f16428cd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 530,
"license_type": "no_license",
"max_line_length": 132,
"num_lines": 11,
"path": "/reviews/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\r\nfrom .models import DfLocationReviews\r\nfrom import_export.admin import ImportExportModelAdmin\r\n# Register your models here.\r\n\r\nclass DfLocationReviewsAdmin(ImportExportModelAdmin):\r\n search_fields = ['Store_Code']\r\n list_display = ('Df_User','Business_Location','Social_Plateform','User_Name','Reating','Review','Review_dateTime','Craete_Date')\r\n list_filter = ('Business_Location','Social_Plateform','Craete_Date',)\r\n\r\nadmin.site.register(DfLocationReviews,DfLocationReviewsAdmin)\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.7745097875595093,
"alphanum_fraction": 0.7745097875595093,
"avg_line_length": 19.399999618530273,
"blob_id": "568be23560a37b1e288fee0227b2cd9f26183571",
"content_id": "9dbdcd9b624f678b7c229b2e7d724c04a207a3f0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 102,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 5,
"path": "/manage_pricing/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass ManagePricingConfig(AppConfig):\n name = 'manage_pricing'\n"
},
{
"alpha_fraction": 0.470822274684906,
"alphanum_fraction": 0.511936366558075,
"avg_line_length": 22.5625,
"blob_id": "abc650e1509678454cc6f0290fe71a646f5c2f0b",
"content_id": "8a0a2d6a84521ec9fa7b32153aa38438855618d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 754,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 32,
"path": "/accounts/migrations/0006_auto_20200806_1141.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-06 11:41\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('accounts', '0005_auto_20200410_1503'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='dfuser',\n old_name='Business_name',\n new_name='Company_name',\n ),\n migrations.RenameField(\n model_name='dfuser',\n old_name='City',\n new_name='Country',\n ),\n migrations.RenameField(\n model_name='dfuser',\n old_name='State',\n new_name='Pnone',\n ),\n migrations.RemoveField(\n model_name='dfuser',\n name='Address',\n ),\n ]\n"
},
{
"alpha_fraction": 0.7079002261161804,
"alphanum_fraction": 0.7255717515945435,
"avg_line_length": 43.904762268066406,
"blob_id": "a9cecbb0c82fd7666616fdee36707fa8d9ae7bf9",
"content_id": "cc3571e81249876a2430c744b8082b32cf18a43f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 962,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 21,
"path": "/social_media_platforms/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\r\nfrom accounts.models import DfUser\r\nimport django\r\n# Create your models here.\r\n\r\nclass DfSocialMedia(models.Model):\r\n DfUser = models.ForeignKey(DfUser, on_delete=models.SET_NULL, null=True, blank=True)\r\n Platform = models.CharField(max_length=50)\r\n Token = models.CharField(max_length=120,null=True,blank=True)\r\n Username = models.CharField(max_length=120,null=True,blank=True)\r\n Email = models.CharField(max_length=120,null=True,blank=True)\r\n Password = models.CharField(max_length=120,null=True,blank=True)\r\n Connect_status = models.CharField(max_length=120,null=True,blank=True)\r\n Other_info = models.TextField(null=True,blank=True)\r\n Craete_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n Update_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n def __str__(self):\r\n return self.Platform\r\n class Meta:\r\n verbose_name_plural = \"DF Social Media\""
},
{
"alpha_fraction": 0.5739837288856506,
"alphanum_fraction": 0.6065040826797485,
"avg_line_length": 25.7391300201416,
"blob_id": "e00298d4572f807a925c2246a9a25f7b7aab685d",
"content_id": "62a924b43e73a5c14d582c470c34c2f9df07f7af",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 615,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 23,
"path": "/manage_locations/migrations/0013_auto_20200416_1220.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-16 12:20\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0012_dflocationopenhours_end_time_2'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dfbusinesslocation',\n name='Do_not_publish_my_address',\n field=models.BooleanField(default=True),\n ),\n migrations.AddField(\n model_name='dfbusinesslocation',\n name='Franchise_Location',\n field=models.BooleanField(default=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7604166865348816,
"alphanum_fraction": 0.7604166865348816,
"avg_line_length": 18.200000762939453,
"blob_id": "d5f9bce19d3cfbc26daaeef5372f73841414366d",
"content_id": "47475bff5e4a2e287336c44f8f50ca9930a82cac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 96,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 5,
"path": "/manage_jobs/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass ManageJobsConfig(AppConfig):\n name = 'manage_jobs'\n"
},
{
"alpha_fraction": 0.5886489152908325,
"alphanum_fraction": 0.5928344130516052,
"avg_line_length": 37.30921173095703,
"blob_id": "57be010138333b6959cf1d977850df851d9b4d41",
"content_id": "e86b5f1034b656bb50e2a9b8b1ebd9271e47a8f0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5973,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 152,
"path": "/accounts/serializear.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom django.contrib.auth.models import User\r\nfrom accounts.models import testUser,DfUser\r\nfrom django.contrib.auth import authenticate\r\nfrom rest_framework.authtoken.models import Token\r\nfrom rest_framework import exceptions\r\nfrom django.utils.encoding import force_bytes, force_text\r\nfrom django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode\r\nfrom dashifyproject.tokens import account_activation_token\r\nimport django\r\n\r\n\r\nclass PaswordResetSerializers(serializers.Serializer):\r\n pera_1 = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n pera_2 = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n password = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n pera_1 = data.get(\"pera_1\", \"\")\r\n pera_2 = data.get(\"pera_2\", \"\")\r\n password = data.get(\"password\", \"\")\r\n\r\n message = \"\"\r\n try:\r\n uid = force_text(urlsafe_base64_decode(pera_1))\r\n user = User.objects.get(pk=uid)\r\n except(TypeError, ValueError, OverflowError, User.DoesNotExist):\r\n user = None\r\n if user is not None and account_activation_token.check_token(user, pera_2): \r\n user.set_password(password)\r\n user.save()\r\n message = \"Your password is set successfuly.\"\r\n else:\r\n mes = \"Link is invalide\"\r\n raise exceptions.ValidationError(mes) \r\n return message\r\n\r\n\r\nclass EmailSerializers(serializers.Serializer):\r\n email_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n email_id = data.get(\"email_id\", \"\")\r\n if User.objects.filter(email=email_id).exists():\r\n data[\"user\"] = User.objects.get(email=email_id)\r\n else:\r\n mes = \"email_id is incorrcet.\"\r\n raise exceptions.ValidationError(mes) \r\n return data \r\nclass UserSerializers(serializers.ModelSerializer):\r\n class Meta:\r\n medel 
= User\r\n fields = '__all__'\r\n\r\n\r\nclass AccountActivateSerializers(serializers.Serializer):\r\n pera_1 = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n pera_2 = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n pera_1 = data.get(\"pera_1\", \"\")\r\n pera_2 = data.get(\"pera_2\", \"\")\r\n message = \"\"\r\n try:\r\n uid = force_text(urlsafe_base64_decode(pera_1))\r\n user = User.objects.get(pk=uid)\r\n except(TypeError, ValueError, OverflowError, User.DoesNotExist):\r\n user = None\r\n if user is not None and account_activation_token.check_token(user, pera_2):\r\n user.is_active = True\r\n user.save()\r\n message = \"Account is activated. please login.\"\r\n else:\r\n message = \"Varification link is invalide.\"\r\n return message \r\n\r\n\r\nclass RegistrationSerializers(serializers.ModelSerializer):\r\n first_name = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n last_name = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Company_name = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Country = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Phone = serializers.CharField(style={\"inpupt_type\":\"number\"},write_only=True)\r\n Zip = serializers.CharField(style={\"inpupt_type\":\"email\"},write_only=True)\r\n class Meta:\r\n model = User\r\n fields = ['first_name','last_name','username','password','Company_name','Country','Phone','Zip']\r\n eextra_kwargs = {\r\n 'password':{'write_only':True}\r\n }\r\n\r\n\r\n\r\n def save(self):\r\n Userset = User(\r\n username = self.validated_data['username'],\r\n first_name=self.validated_data['first_name'],\r\n last_name=self.validated_data['last_name'],\r\n email = self.validated_data['username'],\r\n is_active = False,\r\n )\r\n password = self.validated_data['password']\r\n Userset.set_password(password)\r\n 
Userset.save()\r\n\r\n DfUser_set = DfUser(\r\n user = Userset,\r\n first_name = self.validated_data['first_name'],\r\n last_name = self.validated_data['last_name'],\r\n Company_name = self.validated_data['Company_name'],\r\n Country = self.validated_data['Country'],\r\n Phone = self.validated_data['Phone'],\r\n Zip = self.validated_data['Zip']\r\n )\r\n\r\n DfUser_set.save()\r\n return Userset\r\n# ===========================================\r\n\r\nclass LoginSerializers(serializers.Serializer):\r\n username = serializers.CharField()\r\n password = serializers.CharField()\r\n\r\n def validate(self, data):\r\n username = data.get(\"username\",\"\")\r\n password = data.get(\"password\",\"\")\r\n\r\n if username and password:\r\n user = authenticate(username=username,password=password)\r\n if user:\r\n if user.is_active:\r\n data[\"user\"] = user\r\n\r\n else:\r\n mes = \"User is not activate.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Username and pasword is incorrect & may be your account is not activate.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide username and password\"\r\n raise exceptions.ValidationError(mes)\r\n return data\r\n\r\n\r\nclass DfUserSerializers(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfUser\r\n fields = ('id', 'first_name','last_name','Company_name','Country','Phone','Zip','Last_login','user')\r\n depth = 2\r\n\r\n# ==========================================="
},
{
"alpha_fraction": 0.8178913593292236,
"alphanum_fraction": 0.8178913593292236,
"avg_line_length": 38.25,
"blob_id": "fd8580d06891a0195a2c5a5b3d79904177df3e6d",
"content_id": "f4b5a62a2137ec1b361a1e3c3e23d52f1db82f7b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 313,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 8,
"path": "/manage_voice_faqs/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom import_export.admin import ImportExportModelAdmin\nfrom .models import DfVoiceFaqs\n# Register your models here.\n\nclass DfVoiceFaqsAdmin(ImportExportModelAdmin):\n list_display = ('DfUser','Location','question','Craete_Date')\nadmin.site.register(DfVoiceFaqs,DfVoiceFaqsAdmin)"
},
{
"alpha_fraction": 0.49210527539253235,
"alphanum_fraction": 0.5421052575111389,
"avg_line_length": 19.11111068725586,
"blob_id": "8e0dbcafd65433c4b63180986c248a6216e001ff",
"content_id": "4e85178e2616453836dad6d061f927f526255611",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 380,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 18,
"path": "/reviews/migrations/0002_auto_20200420_1514.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-20 09:44\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('reviews', '0001_initial'),\r\n ]\r\n\r\n operations = [\r\n migrations.RenameField(\r\n model_name='dflocationreviews',\r\n old_name='User',\r\n new_name='Df_User',\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.7521663904190063,
"alphanum_fraction": 0.7521663904190063,
"avg_line_length": 31.11111068725586,
"blob_id": "48be844b7ee93c187a8367935d47db3aa32bd8a9",
"content_id": "33910e2d66c13ed2141b01b56a61592bfcade127",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 577,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 18,
"path": "/queryes/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom .models import DfQueryInfo\nfrom .serializers import QuerysetSerializer\nfrom rest_framework import mixins\nfrom rest_framework import generics\n\n# Create your views here.\n\nclass AddQuery(mixins.ListModelMixin,mixins.CreateModelMixin,generics.GenericAPIView):\n queryset = DfQueryInfo.objects.all()\n serializer_class = QuerysetSerializer\n\n\n def get(self, request, *args, **kwargs):\n return self.list(request, *args, **kwargs)\n\n def post(self, request, *args, **kwargs):\n return self.create(request, *args, **kwargs)"
},
{
"alpha_fraction": 0.7604166865348816,
"alphanum_fraction": 0.7604166865348816,
"avg_line_length": 18.200000762939453,
"blob_id": "0b2b65473ca912e4b2d06ed5fd9aad24e13e67b0",
"content_id": "c6c094d468aff10adfa9374eaaf122a70d5d2527",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 96,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 5,
"path": "/manage_faqs/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass ManageFaqsConfig(AppConfig):\n name = 'manage_faqs'\n"
},
{
"alpha_fraction": 0.5208845138549805,
"alphanum_fraction": 0.5970516204833984,
"avg_line_length": 21.61111068725586,
"blob_id": "f77a3c6690718ea2b103483fd3a793cd3f1e9cd7",
"content_id": "7dac49722d631a71e838307e7c0d8c467d0ebd75",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 407,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 18,
"path": "/manage_campus/migrations/0004_dfcampaign_extera_data.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-10 09:25\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_campus', '0003_auto_20200810_0748'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dfcampaign',\n name='Extera_data',\n field=models.TextField(blank=True, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5212053656578064,
"alphanum_fraction": 0.5502232313156128,
"avg_line_length": 31,
"blob_id": "88ab79ee52740aede8edf856c681f482efa6670d",
"content_id": "7e97fd3f895167cf2515a70e2815809db18fd819",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 896,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 28,
"path": "/manage_pricing/migrations/0002_auto_20200922_1108.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 11:08\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dfprice',\n name='Duration_Type',\n field=models.CharField(blank=True, choices=[('D', 'Days'), ('M', 'Mohnth'), ('Y', 'Year')], max_length=120, null=True, unique=True),\n ),\n migrations.AddField(\n model_name='dfprice',\n name='Duration_time',\n field=models.IntegerField(default=0),\n ),\n migrations.AlterField(\n model_name='dfprice',\n name='Package_Type',\n field=models.CharField(choices=[('S', 'Start'), ('B', 'Business'), ('P', 'Professional'), ('M', 'Max')], max_length=120, unique=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5622775554656982,
"alphanum_fraction": 0.6049821972846985,
"avg_line_length": 42.230770111083984,
"blob_id": "e01ec0e05b176ed0d10afe02916727bab052c5e8",
"content_id": "2b24ab9f1311550984a9ad5c8ac6b69e73db0442",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1686,
"license_type": "no_license",
"max_line_length": 167,
"num_lines": 39,
"path": "/manage_campus/migrations/0003_auto_20200810_0748.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-10 07:48\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\nimport manage_campus.models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0019_auto_20200420_1405'),\n ('accounts', '0007_auto_20200806_1201'),\n ('manage_campus', '0002_auto_20200810_0739'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfCampaign',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Title', models.CharField(max_length=150)),\n ('Sent_from', models.CharField(max_length=150)),\n ('replay_to', models.CharField(max_length=150)),\n ('message', models.TextField(blank=True, null=True)),\n ('Image', models.ImageField(blank=True, null=True, upload_to=manage_campus.models.user_directory_path_for_banner)),\n ('sms_message', models.TextField(blank=True, null=True)),\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ('BusinessLocation', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_locations.DfBusinessLocation')),\n ('DfUser', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\n ],\n options={\n 'verbose_name_plural': 'DF Campaign',\n },\n ),\n migrations.DeleteModel(\n name='DfCampus',\n ),\n ]\n"
},
{
"alpha_fraction": 0.6849222183227539,
"alphanum_fraction": 0.6943880915641785,
"avg_line_length": 38,
"blob_id": "24cbdac428d66ece14d33ce207464cfacb8f2e34",
"content_id": "10d05322f10f62e06640002bc3068647d20a2ab8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2958,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 74,
"path": "/manage_campus/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\r\nfrom accounts.models import DfUser\r\nimport django\r\nfrom datetime import date\r\nfrom manage_locations.models import DfBusinessLocation\r\n\r\n\r\n\r\ndef user_directory_path_for_banner(instance, filename):\r\n project_id_in_list = instance.Title.split(\" \")\r\n today_date = date.today()\r\n project_id_in_string = '_'.join([str(elem) for elem in project_id_in_list])\r\n return '{0}/{1}'.format(project_id_in_string+\"/campus/banner/\"+str(today_date.year)+\"/\"+str(today_date.month)+\"/\"+str(today_date.day),filename)\r\n\r\n\r\nclass DfCampaign(models.Model):\r\n DfUser = models.ForeignKey(DfUser, on_delete=models.SET_NULL, null=True, blank=True)\r\n BusinessLocation = models.ForeignKey(DfBusinessLocation, on_delete=models.SET_NULL, null=True, blank=True)\r\n Head = models.CharField(max_length=500, null=True, blank=True)\r\n Subject = models.CharField(max_length=500, null=True, blank=True)\r\n Title = models.CharField(max_length=500)\r\n Sent_from = models.CharField(max_length=500)\r\n replay_to = models.CharField(max_length=500)\r\n message = models.TextField(null=True,blank=True)\r\n Image = models.ImageField(upload_to=user_directory_path_for_banner,null=True,blank=True)\r\n sms_message = models.TextField(null=True,blank=True)\r\n Extera_data = models.TextField(null=True, blank=True)\r\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n\r\n def __str__(self):\r\n return self.Title\r\n\r\n class Meta:\r\n verbose_name_plural = \"DF Campaign\"\r\n\r\n\r\n\r\nclass DfUseremail(models.Model):\r\n DfUser = models.ForeignKey(DfUser, on_delete=models.SET_NULL, null=True, blank=True)\r\n Campign = models.ForeignKey(DfCampaign, on_delete=models.SET_NULL, null=True, blank=True)\r\n Email = models.CharField(max_length=500, null=True, blank=True)\r\n Contact = models.CharField(max_length=500, null=True, blank=True)\r\n Name = models.CharField(max_length=500, null=True, blank=True)\r\n mail_sent_status = 
models.BooleanField(default=False)\r\n Sent_date = models.DateTimeField(null=True, blank=True)\r\n\r\n\r\n def __str__(self):\r\n return self.Email\r\n\r\n class Meta:\r\n verbose_name_plural = \"DF User Email\"\r\n\r\n\r\ndef upload_image_path_for_banner(instance, filename):\r\n project_id_in_list = \"upload_image_for_url\"\r\n today_date = date.today()\r\n project_id_in_string = project_id_in_list\r\n return '{0}/{1}'.format(project_id_in_string+\"/image/\"+str(today_date.year)+\"/\"+str(today_date.month)+\"/\"+str(today_date.day),filename)\r\n\r\n\r\n\r\nclass DfUploadImage(models.Model):\r\n DfUser = models.ForeignKey(DfUser, on_delete=models.SET_NULL, null=True, blank=True)\r\n UploadFile = models.ImageField(upload_to=upload_image_path_for_banner,null=True,blank=True)\r\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n\r\n def __str__(self):\r\n return str(self.DfUser)\r\n\r\n class Meta:\r\n verbose_name_plural = \"DF Upload Image\""
},
{
"alpha_fraction": 0.5367231369018555,
"alphanum_fraction": 0.590395450592041,
"avg_line_length": 19.823530197143555,
"blob_id": "d18a78ba8b5c5dffbf118133b2e3dec033a40627",
"content_id": "f050b056ad748b52d2d61ff80efbdc24fdb48ebb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 354,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 17,
"path": "/manage_faqs/migrations/0002_auto_20200910_1121.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-10 11:21\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_faqs', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterModelOptions(\n name='dffaqs',\n options={'verbose_name_plural': 'DF Question'},\n ),\n ]\n"
},
{
"alpha_fraction": 0.5754813551902771,
"alphanum_fraction": 0.5792412161827087,
"avg_line_length": 51.09198760986328,
"blob_id": "3d5dab9178292701fdb9157f18b011ef002a9668",
"content_id": "b6af6b68d6c4e7e9aab759363d5c35eb38b4716d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 17554,
"license_type": "no_license",
"max_line_length": 221,
"num_lines": 337,
"path": "/manage_campus/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render,get_object_or_404\nfrom rest_framework.views import APIView\nfrom rest_framework.decorators import api_view\nfrom rest_framework.response import Response\nfrom rest_framework import exceptions\nfrom rest_framework.authentication import TokenAuthentication,SessionAuthentication,BasicAuthentication\nfrom rest_framework.permissions import IsAuthenticated\nfrom .serializear import AddCampaignSerializers,GetAllCampaignSerializers,GetAllCampaignSerializersData,GetAllCampaignSerializersCheckCampaignid\nfrom .serializear import RemovecampaignByIdSerializers,UploadImageViewSerializers,GetAllEmailSerializersCheckCampaignid,GetAllEmailSerializersData\nfrom dashifyproject.tokens import CsrfExemptSessionAuthentication\nfrom accounts.models import DfUser\nfrom manage_locations.models import DfBusinessLocation\nfrom .models import DfCampaign,DfUseremail,DfUploadImage\nfrom datetime import date\nfrom datetime import datetime\nimport base64\nfrom django.core.files.base import ContentFile\nfrom manage_dropdown_value.models import DfBusinessCategory,DfCountry,DfState\nimport email.message\nfrom django.template.loader import render_to_string\nimport smtplib\nfrom django.conf import settings\n\n\n\n\nclass RemoveEmailByCampaignIdView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self, request):\n all_connection_set = {}\n if request.method == \"POST\":\n message = \"\"\n request.data[\"user_id\"] = self.request.user.id\n serializer = GetAllEmailSerializersCheckCampaignid(data=request.data)\n serializer.is_valid(raise_exception=True)\n email_ids = request.data[\"email_ids\"]\n if email_ids:\n email_ids_in_list = email_ids.split(\",\")\n DfUseremail.objects.filter(Campign=serializer.validated_data).filter(id__in=email_ids_in_list).delete()\n message = \"Emails removed\"\n else:\n data_response = \"please provide email_ids\"\n 
raise exceptions.ValidationError(mes)\n return Response({\"messgae\":message}, status=200)\n\n\n\nclass GetEmailByIdView(APIView):\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self, request):\n all_Email_data = {}\n request.data[\"user_id\"] = self.request.user.id\n serializer = GetAllEmailSerializersCheckCampaignid(data=request.data)\n serializer.is_valid(raise_exception=True)\n if DfUseremail.objects.filter(Campign=serializer.validated_data).exists():\n get_emails = DfUseremail.objects.filter(Campign=serializer.validated_data)\n all_EmailSerializer = GetAllEmailSerializersData(get_emails,many=True)\n all_Email_data = all_EmailSerializer.data\n return Response({\"emails\": all_Email_data}, status=200)\n\nclass UploadImageView(APIView):\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self,request):\n data_response = {}\n request.data[\"user_id\"] = self.request.user.id\n serializer = UploadImageViewSerializers(data=request.data)\n if serializer.is_valid():\n user_id = self.request.user.id\n if DfUser.objects.filter(user__id=user_id).exists():\n get_Dfuser_ins = get_object_or_404(DfUser, user__id=user_id)\n # add image start\n image_file_get_cover = request.data[\"UploadFile\"]\n format_cover, imgstr_cover = image_file_get_cover.split(';base64,')\n ext_cover = format_cover.split('/')[-1]\n today_date = date.today()\n set_file_name_cover = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(\n today_date.year)\n file_name_cover = set_file_name_cover + \".\" + ext_cover\n upload_image_get = ContentFile(base64.b64decode(imgstr_cover), name=file_name_cover)\n upload_image = upload_image_get\n\n upload_imaage_ins = DfUploadImage(\n DfUser=get_Dfuser_ins,\n UploadFile=upload_image\n )\n upload_imaage_ins.save()\n data_response[\"message\"] 
= \"Image Upload successfully.\"\n data_response[\"image_id\"] = upload_imaage_ins.id\n data_response[\"image_url\"] = upload_imaage_ins.UploadFile.url\n get_data = \"Campaign create successfully.\"\n # add image end\n else:\n mes = \"Your user_id is incorrect.\"\n data_response = \"Your user_id is incorrect.\"\n raise exceptions.ValidationError(mes)\n else:\n data_response = serializer.errors\n return Response(data_response)\n\nclass AddCampaignView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self,request):\n data_response = {}\n if request.method == \"POST\":\n request.data[\"user_id\"] = self.request.user.id\n serializer = AddCampaignSerializers(data=request.data)\n # serializer.is_valid(raise_exception=True)\n if serializer.is_valid():\n user_id = request.data[\"user_id\"]\n campaign__id = \"\"\n if \"campaign__id\" in request.data:\n campaign__id = request.data[\"campaign__id\"]\n Title = request.data[\"Title\"]\n Sent_from = request.data[\"Sent_from\"]\n replay_to = request.data[\"replay_to\"]\n message = request.data[\"message\"]\n sms_message = request.data[\"sms_message\"]\n location_id = request.data[\"location_id\"]\n Image = request.data[\"Image\"]\n Extera_data = request.data[\"Extera_data\"]\n Head = request.data[\"Head\"]\n Subject = request.data[\"Subject\"]\n upload_image = \"\"\n if DfUser.objects.filter(user__id=user_id).exists():\n get_Dfuser_ins = get_object_or_404(DfUser, user__id=user_id)\n get_DfBusinessLocation_ins = None\n Status_set = True\n if location_id:\n if DfBusinessLocation.objects.filter(id=location_id).filter(DfUser=get_Dfuser_ins).exists():\n get_DfBusinessLocation_ins = get_object_or_404(DfBusinessLocation, id=location_id,\n DfUser=get_Dfuser_ins)\n else:\n Status_set = False\n mes = \"Your location_id is incorrect.\"\n data_response = \"Your location_id is incorrect.\"\n raise 
exceptions.ValidationError(mes)\n if Image:\n image_file_get_cover = Image\n format_cover, imgstr_cover = image_file_get_cover.split(';base64,')\n ext_cover = format_cover.split('/')[-1]\n today_date = date.today()\n set_file_name_cover = str(today_date.day) + \"_\" + str(today_date.month) + \"_\" + str(\n today_date.year)\n file_name_cover = set_file_name_cover + \".\" + ext_cover\n upload_image_get = ContentFile(base64.b64decode(imgstr_cover), name=file_name_cover)\n upload_image = upload_image_get\n if Status_set:\n if campaign__id:\n get_Campaign_INS = get_object_or_404(DfCampaign,id=campaign__id)\n DfCampaign.objects.filter(id=get_Campaign_INS.id).update(\n DfUser=get_Dfuser_ins,\n BusinessLocation=get_DfBusinessLocation_ins,\n Head=Head,\n Subject=Subject,\n Title=Title,\n Sent_from=Sent_from,\n replay_to=replay_to,\n message=message,\n sms_message=sms_message,\n Extera_data=Extera_data\n )\n get_Campaign_INS.Image.delete(save=False)\n get_Campaign_INS.Image = upload_image\n get_Campaign_INS.save()\n # data_response = \"Campaign Update successfully.\"\n data_response[\"message\"] = \"Campaign Update successfully.\"\n data_response[\"campain_id\"] = get_Campaign_INS.id\n get_data = \"Campaign Update successfully.\"\n else:\n connect_plat = DfCampaign(\n DfUser=get_Dfuser_ins,\n BusinessLocation=get_DfBusinessLocation_ins,\n Head=Head,\n Subject=Subject,\n Title=Title,\n Sent_from=Sent_from,\n replay_to=replay_to,\n message=message,\n Image=upload_image,\n sms_message=sms_message,\n Extera_data=Extera_data\n )\n connect_plat.save()\n data_response[\"message\"] = \"Campaign create successfully.\"\n data_response[\"campain_id\"] = connect_plat.id\n get_data = \"Campaign create successfully.\"\n \n else:\n mes = \"Your user_id is incorrect.\"\n data_response = \"Your user_id is incorrect.\"\n raise exceptions.ValidationError(mes)\n else:\n data_response = serializer.errors\n return Response(data_response)\n\nclass GetAllCampaignView(APIView):\n 
authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def get(self,request):\n all_Campaign_data = {}\n request.data[\"user_id\"] = self.request.user.id\n serializer = GetAllCampaignSerializers(data=request.data)\n serializer.is_valid(raise_exception=True)\n all_Campaign = DfCampaign.objects.filter(DfUser=serializer.validated_data).order_by(\"-id\")\n # all_CampaignSerializer = GetAllCampaignSerializersData(all_Campaign, many=True, context={\"request\":request})\n all_CampaignSerializer = GetAllCampaignSerializersData(all_Campaign, many=True)\n all_Campaign_data = all_CampaignSerializer.data\n return Response({\"all_campaign\":all_Campaign_data},status=200)\n\nclass GetCampaignByIdView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self, request):\n all_Campaign_data = {}\n request.data[\"user_id\"] = self.request.user.id\n serializer = GetAllCampaignSerializersCheckCampaignid(data=request.data)\n serializer.is_valid(raise_exception=True)\n all_CampaignSerializer = GetAllCampaignSerializersData(serializer.validated_data)\n all_Campaign_data = all_CampaignSerializer.data\n return Response({\"campaign\": all_Campaign_data}, status=200)\n\nclass RemoveCampaignByIdView(APIView):\n authentication_classes = (TokenAuthentication,CsrfExemptSessionAuthentication,BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self, request):\n all_connection_set = {}\n if request.method == \"POST\":\n request.data[\"user_id\"] = self.request.user.id\n serializer = RemovecampaignByIdSerializers(data=request.data)\n serializer.is_valid(raise_exception=True)\n return Response({\"messgae\": serializer.validated_data}, status=200)\n\nclass AddCampaignEmailView(APIView):\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, 
BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self, request):\n user_id = self.request.user.id\n message_set = \"\"\n # request.data[\"user_id\"] = self.request.user.id\n # serializer = AddCampaignEmailSerializers(data=request.data)\n # serializer.is_valid(raise_exception=True)\n # if 'camp_id' in self.request.POST and 'emails' in self.request.POST and 'names' in self.request.POST:\n if user_id:\n if DfUser.objects.filter(user__id=user_id).exists():\n get_user_instance = get_object_or_404(DfUser, user__id=user_id)\n if request.data['camp_id']:\n if DfCampaign.objects.filter(id=request.data['camp_id'], DfUser=get_user_instance).exists():\n get_campaign_ins = get_object_or_404(DfCampaign, id=request.data['camp_id'], DfUser=get_user_instance)\n for i in range(0, len(request.data[\"emails\"])):\n if request.data[\"emails\"][str(i)]:\n ass_emails = DfUseremail(DfUser=get_user_instance, Campign=get_campaign_ins,Email=request.data[\"emails\"][str(i)], Name=request.data[\"names\"][str(i)],Contact=request.data[\"contact\"][str(i)])\n ass_emails.save()\n message_set = \"Email add in database successfully.\"\n else:\n mes = \"campaign_id is incorrect.\"\n raise exceptions.ValidationError(mes)\n else:\n mes = \"Must provide campaign_id.\"\n raise exceptions.ValidationError(mes)\n else:\n mes = \"user_id is incorrect.\"\n raise exceptions.ValidationError(mes)\n else:\n mes = \"Must provide user_id.\"\n raise exceptions.ValidationError(mes)\n # else:\n # mes = \"camp_id , emails ,names is required.\"\n # raise exceptions.ValidationError(mes)\n return Response({\"messgae\": message_set}, status=200)\n\n\ndef send_email_content(subject,message_content,send_email):\n email_content = message_content\n msg = email.message.Message()\n msg['Subject'] = subject\n msg['From'] = settings.EMAIL_HOST_USER\n msg['To'] = send_email\n password = settings.EMAIL_HOST_PASSWORD\n msg.add_header('Content-Type', 'text/html')\n msg.set_payload(email_content)\n s = 
smtplib.SMTP(settings.EMAIL_HOST + ':' + str(settings.EMAIL_PORT))\n s.starttls()\n s.login(msg['From'], password)\n s.sendmail(msg['From'], [msg['To']], msg.as_string())\n return \"True\"\n\nclass SendEmailsView(APIView):\n authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication, BasicAuthentication,)\n permission_classes = [IsAuthenticated]\n\n def post(self, request):\n message_set = \"\"\n user_id = self.request.user.id\n if user_id:\n if DfUser.objects.filter(user__id=user_id).exists():\n get_user_instance = get_object_or_404(DfUser, user__id=user_id)\n if request.data['camp_id']:\n if DfCampaign.objects.filter(id=request.data['camp_id'], DfUser=get_user_instance).exists():\n get_campaign_ins = get_object_or_404(DfCampaign, id=request.data['camp_id'], DfUser=get_user_instance)\n limit = 5\n if request.data['send_limit']:\n limit = request.data['send_limit']\n if DfUseremail.objects.filter(Campign=get_campaign_ins).exists():\n get_emails = DfUseremail.objects.filter(Campign=get_campaign_ins)[:limit]\n for item in get_emails:\n mail_content_content = str(get_campaign_ins.message).replace('{name}',item.Name)\n status = send_email_content(get_campaign_ins.Subject, mail_content_content,item.Email)\n DfUseremail.objects.filter(id=item.id).update(mail_sent_status=True,Sent_date=datetime.now())\n # message_set += \"||\"+item.Name+\"==\"+str(status)\n message_set = \"Send All Email.\"\n else:\n message_set = \"All email is sent.\"\n else:\n mes = \"campaign_id is incorrect.\"\n raise exceptions.ValidationError(mes)\n else:\n mes = \"Must provide campaign_id.\"\n raise exceptions.ValidationError(mes)\n else:\n mes = \"user_id is incorrect.\"\n raise exceptions.ValidationError(mes)\n else:\n mes = \"Must provide user_id.\"\n raise exceptions.ValidationError(mes)\n return Response({\"messgae\": message_set}, status=200)"
},
{
"alpha_fraction": 0.7023809552192688,
"alphanum_fraction": 0.7023809552192688,
"avg_line_length": 19.25,
"blob_id": "34c58693e51af1c7c652a0a6e89023780470ad26",
"content_id": "34f419312aae36333a624d712222fa3da1d92f0c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 168,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 8,
"path": "/queryes/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\n\r\nurlpatterns = [\r\n path('', views.AddQuery.as_view()),\r\n]"
},
{
"alpha_fraction": 0.7239382266998291,
"alphanum_fraction": 0.7335907220840454,
"avg_line_length": 47.33333206176758,
"blob_id": "e47ef4c7f4ab7f43a9176661a380aa0a40fa792c",
"content_id": "e3c0c1f3103ea3beb50f88f6739642a9aea031b0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1036,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 21,
"path": "/reviews/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\r\nfrom accounts.models import DfUser\r\nfrom manage_locations.models import DfBusinessLocation\r\nimport django\r\n# Create your models here.\r\n\r\nclass DfLocationReviews(models.Model):\r\n Df_User = models.ForeignKey(DfUser, on_delete=models.SET_NULL, null=True, blank=True)\r\n Business_Location = models.ForeignKey(DfBusinessLocation, on_delete=models.SET_NULL, null=True, blank=True)\r\n Social_Plateform = models.CharField(max_length=50,null=True,blank=True)\r\n User_Name = models.CharField(max_length=50,null=True,blank=True)\r\n Reating = models.CharField(max_length=50,null=True,blank=True)\r\n Review = models.TextField(null=True,blank=True)\r\n User_Image_URL = models.TextField(max_length=50, null=True, blank=True)\r\n Review_dateTime = models.CharField(max_length=50, null=True, blank=True)\r\n Craete_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n def __str__(self):\r\n return self.User_Name\r\n class Meta:\r\n verbose_name_plural = \"DF Business Reviews\"\r\n"
},
{
"alpha_fraction": 0.8069444298744202,
"alphanum_fraction": 0.8069444298744202,
"avg_line_length": 44.0625,
"blob_id": "98bbd514542ca9740a89f24ac466d07aaa3a47ce",
"content_id": "60a0b8059334781237add1f8071bf327d4d27b90",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 720,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 16,
"path": "/manage_jobs/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom import_export.admin import ImportExportModelAdmin\nfrom .models import DfJobCategory,DfJobs,DfJobApplaicationSet\n# Register your models here.\n\nclass DfJobCategoryAdmin(ImportExportModelAdmin):\n list_display = ('CategoryName','Create_date')\nadmin.site.register(DfJobCategory,DfJobCategoryAdmin)\n\nclass DfJobsAdmin(ImportExportModelAdmin):\n list_display = ('Category_name','Job_Title','Job_slug','Job_Description','Create_date')\nadmin.site.register(DfJobs,DfJobsAdmin)\n\nclass DfJobApplaicationAdmin(ImportExportModelAdmin):\n list_display = ('Job_title','job_cate','Name','email','contact_no','Application_Date')\nadmin.site.register(DfJobApplaicationSet,DfJobApplaicationAdmin)"
},
{
"alpha_fraction": 0.5416666865348816,
"alphanum_fraction": 0.5833333134651184,
"avg_line_length": 25.086956024169922,
"blob_id": "87b8d7684abb6e8811ce23df2d66f22caea19670",
"content_id": "d96d5f34631a1bf7850206cea8cae8d0f35dcb4c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 600,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 23,
"path": "/manage_campus/migrations/0005_auto_20200811_0951.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-11 09:51\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_campus', '0004_dfcampaign_extera_data'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dfcampaign',\n name='Head',\n field=models.CharField(blank=True, max_length=150, null=True),\n ),\n migrations.AddField(\n model_name='dfcampaign',\n name='Subject',\n field=models.CharField(blank=True, max_length=150, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.6179915070533752,
"alphanum_fraction": 0.6213203072547913,
"avg_line_length": 52.09110641479492,
"blob_id": "458c5a569729c34f0d10c62fb490aa6428e20a72",
"content_id": "47fb6e8a802c7d387f4b9d6f672bbe63fc792a5f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 24934,
"license_type": "no_license",
"max_line_length": 399,
"num_lines": 461,
"path": "/manage_locations/serializear.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom django.shortcuts import get_object_or_404\r\nfrom .models import DfBusinessLocation,DfLocationImage,DfLocationPaymentMethod,DfLocationConnectPlatform,DfLocationOpenHours\r\nfrom rest_framework import exceptions\r\nfrom rest_framework.response import Response\r\nfrom accounts.models import DfUser\r\nfrom social_media_platforms.models import DfSocialMedia\r\n\r\n\r\n\r\nclass GetOneLocationSerializersValidate(serializers.Serializer):\r\n location_id = serializers.CharField()\r\n\r\n def validate(self, data):\r\n location_id = data.get(\"location_id\", \"\")\r\n location_data = {}\r\n if location_id:\r\n if DfBusinessLocation.objects.filter(id=location_id).exists():\r\n # location_data = get_object_or_404(DfBusinessLocation, id=location_id)\r\n location_data = get_object_or_404(DfBusinessLocation, id=location_id)\r\n else:\r\n mes = \"location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide location_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return location_data\r\n\r\nclass GetAllLocationSerializersValidate(serializers.Serializer):\r\n user_id = serializers.CharField()\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n get_user_instance = {}\r\n if user_id:\r\n if DfUser.objects.filter(id=user_id).exists():\r\n get_user_instance = get_object_or_404(DfUser, id=user_id)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide user_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return get_user_instance\r\n\r\n\r\nclass GetDfBusinessLocationSerializers(serializers.ModelSerializer):\r\n id = serializers.IntegerField(required=True)\r\n class Meta:\r\n model = DfLocationImage\r\n fields = ['id','Image']\r\n\r\nclass GetDfLocationPaymentSerializers(serializers.ModelSerializer):\r\n id = serializers.IntegerField(required=True)\r\n class Meta:\r\n model = 
DfLocationPaymentMethod\r\n fields = ['id','Payment_Method']\r\n\r\n\r\nclass GetOpenhourSerializers(serializers.ModelSerializer):\r\n id = serializers.IntegerField(required=True)\r\n class Meta:\r\n model = DfLocationOpenHours\r\n fields = ['id','date','Day','Type','Open_status','start_time_1','end_time_1','start_time_2','end_time_2']\r\n\r\n\r\nclass GetAllLocationSerializers(serializers.ModelSerializer):\r\n Df_location_image = GetDfBusinessLocationSerializers(many=True)\r\n Df_location_payments = GetDfLocationPaymentSerializers(many=True)\r\n Df_location_poen_hour = GetOpenhourSerializers(many=True) \r\n\r\n class Meta:\r\n model = DfBusinessLocation\r\n fields = ['id','DfUser', 'Store_Code', 'Business_Logo', 'Location_name', 'Business_category', 'Additional_catugory',\r\n 'Address_1', 'Address_2', 'Country', 'State', 'City', 'Zipcode', 'Phone_no', 'Website','Franchise_Location','Do_not_publish_my_address',\r\n 'Business_Owner_Name', 'Owner_Email', 'Business_Tagline', 'Year_Of_Incorporation', 'About_Business',\r\n 'Facebook_Profile', 'Instagram_Profile', 'Twitter_Profile','Business_Cover_Image', 'Craete_Date', 'Update_Date','Df_location_payments','Df_location_image','Df_location_poen_hour']\r\n\r\n\r\nclass AddLocationSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Business_Logo = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True,required=False,allow_blank=True)\r\n Business_Cover_Image = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True,required=False,allow_blank=True)\r\n Location_name = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Business_category = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Additional_catugory = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Address_1 = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n 
Address_2 = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Country = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n State = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n City = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Zipcode = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Phone_no = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Franchise_Location = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Do_not_publish_my_address = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Website = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Business_Owner_Name = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Owner_Email = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Business_Tagline = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Year_Of_Incorporation = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n About_Business = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Facebook_Profile = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Instagram_Profile = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Twitter_Profile = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n\r\n class Meta:\r\n model = DfBusinessLocation\r\n fields = 
['user_id','Store_Code','Business_Logo','Location_name','Business_category','Additional_catugory','Address_1','Address_2','Country','State','City','Zipcode','Phone_no','Website','Business_Owner_Name','Owner_Email','Business_Tagline','Year_Of_Incorporation','About_Business','Facebook_Profile','Instagram_Profile','Twitter_Profile','Business_Cover_Image','Craete_Date','Update_Date']\r\n\r\n# ====================EditLocationHoursSerializers ========\r\n\r\nclass EditLocationHoursSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n\r\n def validate(self, data):\r\n Location_id = data.get(\"Location_id\", \"\")\r\n user_id = data.get(\"user_id\", \"\")\r\n update_info = None\r\n if Location_id:\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfBusinessLocation.objects.filter(id=Location_id).exists():\r\n get_bus_loca_ins = get_object_or_404(DfBusinessLocation, id=Location_id)\r\n if get_bus_loca_ins.DfUser.id == get_Dfuser_ins.id:\r\n update_info = get_bus_loca_ins\r\n else:\r\n mes = \"Location_id is not related to current login user.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"user is not login.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide location_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return update_info\r\n# ====================EditLocationHoursSerializers ========\r\n\r\n\r\n\r\n# ====================EditLocationBusinessSerializers ========\r\n\r\nclass EditLocationBusinessSerializers(serializers.Serializer):\r\n Location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Business_Owner_Name = 
serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Owner_Email = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Business_Tagline = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Year_Of_Incorporation = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n About_Business = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Facebook_Profile = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Instagram_Profile = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n Twitter_Profile = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True)\r\n\r\n def validate(self, data):\r\n Location_id = data.get(\"Location_id\", \"\")\r\n Business_Owner_Name = data.get(\"Business_Owner_Name\", \"\")\r\n Owner_Email = data.get(\"Owner_Email\", \"\")\r\n Business_Tagline = data.get(\"Business_Tagline\", \"\")\r\n Year_Of_Incorporation = data.get(\"Year_Of_Incorporation\", \"\")\r\n About_Business = data.get(\"About_Business\", \"\")\r\n Facebook_Profile = data.get(\"Facebook_Profile\", \"\")\r\n Instagram_Profile = data.get(\"Instagram_Profile\", \"\")\r\n Twitter_Profile = data.get(\"Twitter_Profile\", \"\")\r\n update_info = \"\"\r\n if Location_id:\r\n if DfBusinessLocation.objects.filter(id=Location_id).exists():\r\n DfBusinessLocation.objects.filter(id=Location_id).update(\r\n Business_Owner_Name = Business_Owner_Name,\r\n Owner_Email = Owner_Email,\r\n Business_Tagline = Business_Tagline,\r\n Year_Of_Incorporation = Year_Of_Incorporation,\r\n About_Business = About_Business,\r\n Facebook_Profile=Facebook_Profile,\r\n Instagram_Profile=Twitter_Profile,\r\n Twitter_Profile=Twitter_Profile\r\n )\r\n update_info = \"Business info update successfully\"\r\n else:\r\n mes = \"location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide 
location_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return update_info\r\n# ====================EditLocationBusinessSerializers ========\r\n\r\n\r\n\r\n\r\n\r\n\r\n# ====================EditLocationpaymentMethodSerializers ========\r\n\r\nclass EditLocationpaymentMethodSerializers(serializers.Serializer):\r\n Location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n Location_id = data.get(\"Location_id\", \"\")\r\n Location_instance = \"\"\r\n if Location_id:\r\n if DfBusinessLocation.objects.filter(id=Location_id).exists():\r\n Location_instance = DfBusinessLocation.objects.filter(id=Location_id)\r\n else:\r\n mes = \"location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide location_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return Location_instance\r\n# ====================EditLocationBusinessSerializers ========\r\n\r\nclass LocationWithSocialMediaSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n platform_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Connection_Status = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=False,allow_blank=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n platform_id = data.get(\"platform_id\", \"\")\r\n Connection_Status = data.get(\"Connection_Status\", \"\")\r\n message = \"\"\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfBusinessLocation.objects.filter(id=location_id).filter(DfUser=get_Dfuser_ins).exists():\r\n get_DfBusinessLocation_ins = get_object_or_404(DfBusinessLocation, 
id=location_id,DfUser=get_Dfuser_ins)\r\n if DfSocialMedia.objects.filter(id=platform_id).filter(DfUser=get_Dfuser_ins).exists():\r\n get_DfSocialMedia_ins = get_object_or_404(DfSocialMedia, id=platform_id,DfUser=get_Dfuser_ins)\r\n if DfLocationConnectPlatform.objects.filter(Business_Location=get_DfBusinessLocation_ins).filter(Social_Platform=get_DfSocialMedia_ins).exists():\r\n get_LCP_INS = get_object_or_404(DfLocationConnectPlatform , Business_Location=get_DfBusinessLocation_ins,Social_Platform=get_DfSocialMedia_ins)\r\n DfLocationConnectPlatform.objects.filter(id=get_LCP_INS.id).update(\r\n DfUser=get_Dfuser_ins,\r\n Business_Location=get_DfBusinessLocation_ins,\r\n Social_Platform=get_DfSocialMedia_ins,\r\n Connection_Status=Connection_Status\r\n )\r\n message = \"Location connection update.\"\r\n else:\r\n connect_plat = DfLocationConnectPlatform(\r\n DfUser = get_Dfuser_ins,\r\n Business_Location = get_DfBusinessLocation_ins,\r\n Social_Platform = get_DfSocialMedia_ins,\r\n Connection_Status = Connection_Status\r\n )\r\n connect_plat.save()\r\n message = \"Location connect with social media.\"\r\n else:\r\n mes = \"Your platform_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Your location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Your user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n return message\r\n\r\nclass LocationRemoveWithSocialMediaSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_connect_social_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_connect_social_id = data.get(\"location_connect_social_id\", \"\")\r\n message = \"\"\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n 
if DfLocationConnectPlatform.objects.filter(id=location_connect_social_id).exists():\r\n get_CMC_ins = get_object_or_404(DfLocationConnectPlatform, id=location_connect_social_id)\r\n if get_CMC_ins.DfUser.id == get_Dfuser_ins.id:\r\n DfLocationConnectPlatform.objects.filter(id=location_connect_social_id).delete()\r\n message = \"Connection remove with social media platform.\"\r\n else:\r\n mes = \"This location_connect_social_id is not related to current login user.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Your platform_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Your location_connect_social_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n return message\r\n\r\n\r\n\r\nclass RemoveLocationByIdSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n message = \"\"\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfBusinessLocation.objects.filter(id=location_id).exists():\r\n get_BL_ins = get_object_or_404(DfBusinessLocation,id=location_id)\r\n if get_BL_ins.DfUser.id == get_Dfuser_ins.id:\r\n get_ins = DfBusinessLocation.objects.get(id=location_id)\r\n DfLocationConnectPlatform.objects.filter(Business_Location=get_ins).delete()\r\n DfLocationImage.objects.filter(Business_Location=get_ins).delete()\r\n DfLocationOpenHours.objects.filter(Business_Location=get_ins).delete()\r\n DfLocationPaymentMethod.objects.filter(Business_Location=get_ins).delete()\r\n DfBusinessLocation.objects.filter(id=location_id).delete() \r\n message = \"Business Location remove successfully.\"\r\n else:\r\n mes = \"This location_id is not related to 
current login user.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Your location_connect_social_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n return message\r\n\r\n\r\n\r\nclass GetAllConnectionOfOneLocationSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n get_data = None\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfBusinessLocation.objects.filter(id=location_id).exists():\r\n get_BL_ins = get_object_or_404(DfBusinessLocation,id=location_id)\r\n if get_BL_ins.DfUser.id == get_Dfuser_ins.id:\r\n if DfLocationConnectPlatform.objects.filter(Business_Location=get_BL_ins).exists():\r\n\r\n get_data = DfLocationConnectPlatform.objects.filter(Business_Location=get_BL_ins)\r\n \r\n else:\r\n mes = \"This location_id is not related to current login user.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Your location_connect_social_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n return get_data\r\n\r\nclass GetConnectionWithCocialMediaSerializers(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfLocationConnectPlatform\r\n fields = ['id', 'Connection_Status', 'Craete_Date', 'Update_Date', 'Business_Location',\r\n 'Social_Platform', 'DfUser']\r\n depth = 1\r\n\r\nclass UpdateImagesFilesByLocationIdSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_id = 
serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Business_Logo = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True,required=False,allow_blank=True)\r\n Business_Cover_Image = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True,required=False,allow_blank=True)\r\n Other_Image = serializers.CharField(style={\"inpupt_type\":\"text\"},write_only=True,required=False,allow_blank=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n location_id_get =None\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfBusinessLocation.objects.filter(id=location_id).filter(DfUser=get_Dfuser_ins).exists():\r\n get_DfBusinessLocation_ins = get_object_or_404(DfBusinessLocation, id=location_id,DfUser=get_Dfuser_ins)\r\n df_bl_dta= DfBusinessLocation.objects.get(id=location_id)\r\n location_id_get = df_bl_dta\r\n else:\r\n mes = \"Your location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes) \r\n else:\r\n mes = \"Your user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes) \r\n return location_id_get \r\n\r\n\r\n\r\nclass UpdateImagesFilesByLocationIdImageIdSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n image_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n image = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n image_id = data.get(\"image_id\", \"\")\r\n image = data.get(\"image\", \"\")\r\n location_id_get =None\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n 
get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfBusinessLocation.objects.filter(id=location_id).filter(DfUser=get_Dfuser_ins).exists():\r\n get_DfBusinessLocation_ins = get_object_or_404(DfBusinessLocation, id=location_id,DfUser=get_Dfuser_ins)\r\n df_bl_dta= DfBusinessLocation.objects.get(id=location_id)\r\n location_id_get = df_bl_dta\r\n else:\r\n mes = \"Your location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes) \r\n else:\r\n mes = \"Your user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes) \r\n return location_id_get \r\n\r\n\r\nclass RemoveImagesFilesByLocationIdImageIdSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n image_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n image_id = data.get(\"image_id\", \"\")\r\n location_id_get =None\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfBusinessLocation.objects.filter(id=location_id).filter(DfUser=get_Dfuser_ins).exists():\r\n get_DfBusinessLocation_ins = get_object_or_404(DfBusinessLocation, id=location_id,DfUser=get_Dfuser_ins)\r\n df_bl_dta= DfBusinessLocation.objects.get(id=location_id)\r\n location_id_get = df_bl_dta\r\n else:\r\n mes = \"Your location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes) \r\n else:\r\n mes = \"Your user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes) \r\n return location_id_get \r\n\r\n\r\nclass GetOpneHourByLocationIdViewSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n location_id = 
serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n set_type = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True) \r\n\r\n def validate(self, data):\r\n location_id_get = None\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n set_type = data.get(\"set_type\", \"\") \r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfBusinessLocation.objects.filter(id=location_id).filter(DfUser=get_Dfuser_ins).exists():\r\n get_DfBusinessLocation_ins = get_object_or_404(DfBusinessLocation, id=location_id,DfUser=get_Dfuser_ins)\r\n df_bl_dta= DfBusinessLocation.objects.get(id=location_id)\r\n location_id_get = df_bl_dta\r\n else:\r\n mes = \"Your location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes) \r\n else:\r\n mes = \"Your user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n return location_id_get"
},
{
"alpha_fraction": 0.7644287347793579,
"alphanum_fraction": 0.7644287347793579,
"avg_line_length": 59.71428680419922,
"blob_id": "3cf07e4c5ee838df703615677ab601078704ca0d",
"content_id": "65ed1aa72fdd3c1f5e3793b44aeb872916cd7445",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 849,
"license_type": "no_license",
"max_line_length": 201,
"num_lines": 14,
"path": "/manage_orders_and_payments/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom import_export.admin import ImportExportModelAdmin\nfrom .models import DfOrders ,DfOrdersAndPayment\n# Register your models here.\n\nclass DfOrdersAdmin(ImportExportModelAdmin):\n list_display = ('Order_id','DfUser','Package','Final_Amount','Duration_Time','Duration_Type','Create_Date','Payment','Payment_Type','Transaction_id','Payment_Date','Active','Start_Date','End_Date')\n readonly_fields = [\"Order_id\"]\nadmin.site.register(DfOrders,DfOrdersAdmin)\n\nclass DfOrdersAndPaymentAdmin(ImportExportModelAdmin):\n list_display = ('Order_id','DfUser','Package','Final_Amount','Duration_Time','Duration_Type','Create_Date','Payment','Payment_Type','Transaction_id','Payment_Date','Active','Start_Date','End_Date')\n readonly_fields = [\"Order_id\"]\nadmin.site.register(DfOrdersAndPayment,DfOrdersAndPaymentAdmin)"
},
{
"alpha_fraction": 0.5078864097595215,
"alphanum_fraction": 0.5630914568901062,
"avg_line_length": 26.565217971801758,
"blob_id": "26794b56d8df175c1c9237469cd9ee4f70b5d14f",
"content_id": "2d69695c0acd11757f011ff88776cdd3cd1f9c00",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 634,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 23,
"path": "/manage_pricing/migrations/0004_auto_20200922_1146.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 11:46\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0003_auto_20200922_1110'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='dfprice',\n name='priyorty',\n field=models.IntegerField(default=0),\n ),\n migrations.AlterField(\n model_name='dfprice',\n name='Duration_Type',\n field=models.CharField(blank=True, choices=[('D', 'Days'), ('M', 'Month'), ('Y', 'Year')], max_length=120, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.564453125,
"avg_line_length": 22.272727966308594,
"blob_id": "58626656097151eb2865d5a57bfda36eadbbdd64",
"content_id": "f8f302b09b3f96be0c97d3e2a69b4a162bde40e5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 512,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 22,
"path": "/accounts/migrations/0007_auto_20200806_1201.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-06 12:01\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('accounts', '0006_auto_20200806_1141'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='dfuser',\n name='Pnone',\n ),\n migrations.AddField(\n model_name='dfuser',\n name='Phone',\n field=models.IntegerField(blank=True, max_length=20, null=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5795019268989563,
"alphanum_fraction": 0.6091954112052917,
"avg_line_length": 31.625,
"blob_id": "a44c0b174d59593dfce125da7ef935e32b98ff16",
"content_id": "53191e8a4501c7d1569b64682bd99a8c6103b7de",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1044,
"license_type": "no_license",
"max_line_length": 165,
"num_lines": 32,
"path": "/manage_jobs/migrations/0003_auto_20200919_1225.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-19 12:25\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_jobs', '0002_auto_20200919_1201'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='dfjobapplaication',\n name='Job',\n ),\n migrations.RemoveField(\n model_name='dfjobapplaication',\n name='job_category',\n ),\n migrations.AddField(\n model_name='dfjobapplaication',\n name='Job_title',\n field=models.ForeignKey(default='', on_delete=django.db.models.deletion.SET_DEFAULT, related_name='Job_DfJobs', to='manage_jobs.DfJobs'),\n ),\n migrations.AddField(\n model_name='dfjobapplaication',\n name='job_cate',\n field=models.ForeignKey(default='', on_delete=django.db.models.deletion.SET_DEFAULT, related_name='AwWineType_DfApplay', to='manage_jobs.DfJobCategory'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5806093215942383,
"alphanum_fraction": 0.6017345190048218,
"avg_line_length": 60.45833206176758,
"blob_id": "ef9dec61266134ef9962aa76dbc7c884eb00b87d",
"content_id": "b7438c3b110676b735f6be47d16353894681fcb0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4497,
"license_type": "no_license",
"max_line_length": 148,
"num_lines": 72,
"path": "/manage_locations/migrations/0001_initial.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-10 09:33\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport django.utils.timezone\r\nimport manage_locations.models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n initial = True\r\n\r\n dependencies = [\r\n ('accounts', '0005_auto_20200410_1503'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='DfLocationImage',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Image', models.ImageField(upload_to=manage_locations.models.user_directory_path_for_other_image)),\r\n ('Craete_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Update_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Business_Location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF Location Other Image',\r\n },\r\n ),\r\n migrations.CreateModel(\r\n name='DfBusinessLocation',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Store_Code', models.CharField(max_length=50)),\r\n ('Business_Logo', models.ImageField(blank=True, null=True, upload_to=manage_locations.models.user_directory_path)),\r\n ('Location_name', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Business_catugory', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Additional_catugory', models.TextField(blank=True, null=True)),\r\n ('Address_1', models.TextField(blank=True, max_length=120, null=True)),\r\n ('Address_2', models.TextField(blank=True, max_length=120, null=True)),\r\n ('Country', models.CharField(blank=True, max_length=120, null=True)),\r\n ('State', models.CharField(blank=True, max_length=120, null=True)),\r\n ('City', models.CharField(blank=True, max_length=120, null=True)),\r\n 
('Zipcode', models.IntegerField(blank=True, null=True)),\r\n ('Phone_no', models.IntegerField(blank=True, null=True)),\r\n ('Website', models.URLField(blank=True, null=True)),\r\n ('Business_Owner_Name', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Owner_Email', models.EmailField(blank=True, max_length=254, null=True)),\r\n ('Business_Tagline', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Year_Of_Incorporation', models.IntegerField()),\r\n ('About_Business', models.IntegerField()),\r\n ('Facebook_Profile', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Instagram_Profile', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Twitter_Profile', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Operating_Hours_Monday', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Operating_Hours_Tuseday', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Operating_Hours_Wednesday', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Operating_Hours_Thursday', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Operating_Hours_Friday', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Operating_Hours_Saturday', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Operating_Hours_Sunday', models.CharField(blank=True, max_length=120, null=True)),\r\n ('Business_Cover_Image', models.ImageField(upload_to=manage_locations.models.user_directory_path_for_banner)),\r\n ('Craete_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Update_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('DfUser', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF Business Location',\r\n },\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.5936455130577087,
"alphanum_fraction": 0.6454849243164062,
"avg_line_length": 29.473684310913086,
"blob_id": "13173e04aa1e18d9057661e1106c23d9eeb74600",
"content_id": "07978bcd9b08552f6037b1b46f9c035e3234c97a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 598,
"license_type": "no_license",
"max_line_length": 181,
"num_lines": 19,
"path": "/manage_locations/migrations/0006_auto_20200411_1556.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-11 10:26\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('manage_locations', '0005_auto_20200410_1800'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='dflocationimage',\r\n name='Business_Location',\r\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Df_location_image', to='manage_locations.DfBusinessLocation'),\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.5859155058860779,
"alphanum_fraction": 0.6187793612480164,
"avg_line_length": 35.72413635253906,
"blob_id": "0dd2a34a758a984af5087bb9b01f28535027f072",
"content_id": "2404cba1d142c999bab55d7af0c18638759667d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1065,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 29,
"path": "/manage_campus/migrations/0009_dfuploadimage.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-08-15 12:49\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\nimport manage_campus.models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('accounts', '0007_auto_20200806_1201'),\n ('manage_campus', '0008_dfuseremail_contact'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfUploadImage',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('UploadFile', models.ImageField(blank=True, null=True, upload_to=manage_campus.models.upload_image_path_for_banner)),\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ('DfUser', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\n ],\n options={\n 'verbose_name_plural': 'DF Upload Image',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.5896136164665222,
"alphanum_fraction": 0.5933310985565186,
"avg_line_length": 41.30244064331055,
"blob_id": "2deb78fa4a68a0c7bedfd9d15da85bcac5e4f039",
"content_id": "6a7888a3689b44428bf9ca23392bc94a603943f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8877,
"license_type": "no_license",
"max_line_length": 148,
"num_lines": 205,
"path": "/manage_campus/serializear.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom django.shortcuts import get_object_or_404\r\nfrom .models import DfCampaign,DfUseremail,DfUploadImage\r\nfrom rest_framework import exceptions\r\nfrom rest_framework.response import Response\r\nfrom manage_locations.models import DfBusinessLocation\r\nfrom accounts.models import DfUser\r\nimport base64\r\nfrom django.core.files.base import ContentFile\r\nfrom datetime import date\r\n\r\n\r\n\r\nclass GetAllEmailSerializersData(serializers.ModelSerializer):\r\n\r\n\r\n class Meta:\r\n model = DfUseremail\r\n fields = ['id','DfUser', 'Campign', 'Email', 'Contact', 'Name', 'mail_sent_status','Sent_date']\r\n\r\n\r\n\r\nclass GetAllEmailSerializersCheckCampaignid(serializers.Serializer):\r\n user_id = serializers.CharField()\r\n camp_id = serializers.CharField()\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n campaign_id = data.get(\"camp_id\", \"\")\r\n get_campaign_ins = {}\r\n\r\n if user_id:\r\n if DfUser.objects.filter(user__id=user_id).exists():\r\n get_user_instance = get_object_or_404(DfUser, user__id=user_id)\r\n if campaign_id:\r\n if DfCampaign.objects.filter(id=campaign_id, DfUser=get_user_instance).exists():\r\n get_campaign_ins = get_object_or_404(DfCampaign, id=campaign_id, DfUser=get_user_instance)\r\n else:\r\n mes = \"campaign_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide campaign_id.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide user_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return get_campaign_ins\r\n\r\n\r\n\r\n\r\nclass UploadImageViewSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n UploadFile = serializers.CharField(style={\"inpupt_type\": \"text\"}, 
write_only=True,required=True,allow_blank=True)\r\n\r\n class Meta:\r\n model = DfUploadImage\r\n fields = ['user_id','UploadFile']\r\n\r\n\r\nclass AddCampaignSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Title = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Sent_from = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n replay_to = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n message = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n sms_message = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=True,allow_blank=True)\r\n location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=True,allow_blank=True)\r\n Image = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=True,allow_blank=True)\r\n Extera_data = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=True,allow_blank=True)\r\n Head = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=True,allow_blank=True)\r\n Subject = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True,required=True,allow_blank=True)\r\n\r\n\r\n class Meta:\r\n model = DfCampaign\r\n fields = ['user_id','Title','Head','Subject', 'Sent_from', 'replay_to', 'message','Image','sms_message','Extera_data']\r\n\r\n\r\n\r\nclass GetAllCampaignSerializers(serializers.Serializer):\r\n user_id = serializers.CharField()\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n get_user_instance = {}\r\n if user_id:\r\n if DfUser.objects.filter(user__id=user_id).exists():\r\n get_user_instance = get_object_or_404(DfUser, user__id=user_id)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must 
provide user_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return get_user_instance\r\n\r\n\r\nclass GetAllCampaignSerializersData(serializers.ModelSerializer):\r\n\r\n\r\n class Meta:\r\n model = DfCampaign\r\n fields = ['id','DfUser', 'BusinessLocation', 'Title', 'Sent_from', 'replay_to', 'message','Image','sms_message','Extera_data','Create_date']\r\n\r\n\r\n\r\n\r\nclass AddCampaignEmailSerializers(serializers.Serializer):\r\n user_id = serializers.CharField()\r\n camp_id = serializers.CharField()\r\n emails = serializers.CharField()\r\n names = serializers.CharField()\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n camp_id = data.get(\"camp_id\", \"\")\r\n emails = data.get(\"emails\", \"\")\r\n names = data.get(\"names\", \"\")\r\n message = \"\"\r\n if user_id:\r\n if DfUser.objects.filter(user__id=user_id).exists():\r\n get_user_instance = get_object_or_404(DfUser, user__id=user_id)\r\n if campaign_id:\r\n if DfCampaign.objects.filter(id=campaign_id,DfUser=get_user_instance).exists():\r\n get_campaign_ins = get_object_or_404(DfCampaign,id=campaign_id,DfUser=get_user_instance)\r\n for i in range(0, len(emails)):\r\n ass_emails = DfUseremail(DfUser=get_user_instance,Campign=get_campaign_ins,Email=emails[i],Name=names[i])\r\n ass_emails.save()\r\n message = \"Email add in database successfully.\"\r\n else:\r\n mes = \"campaign_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide campaign_id.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide user_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return get_campaign_ins\r\n\r\n\r\n\r\nclass GetAllCampaignSerializersCheckCampaignid(serializers.Serializer):\r\n user_id = serializers.CharField()\r\n camp_id = serializers.CharField()\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n 
campaign_id = data.get(\"camp_id\", \"\")\r\n get_campaign_ins = {}\r\n \r\n if user_id:\r\n if DfUser.objects.filter(user__id=user_id).exists():\r\n get_user_instance = get_object_or_404(DfUser, user__id=user_id)\r\n if campaign_id:\r\n if DfCampaign.objects.filter(id=campaign_id,DfUser=get_user_instance).exists():\r\n get_campaign_ins = get_object_or_404(DfCampaign,id=campaign_id,DfUser=get_user_instance)\r\n else:\r\n mes = \"campaign_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide campaign_id.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide user_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return get_campaign_ins\r\n\r\n\r\n\r\nclass RemovecampaignByIdSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n campaign_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n campaign_id = data.get(\"campaign_id\", \"\")\r\n message = \"\"\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_Dfuser_ins = get_object_or_404(DfUser, user_id=user_id)\r\n if DfCampaign.objects.filter(id=campaign_id).exists():\r\n get_campaign_ins = get_object_or_404(DfCampaign,id=campaign_id)\r\n if get_campaign_ins.DfUser.id == get_Dfuser_ins.id:\r\n DfCampaign.objects.filter(id=campaign_id).delete()\r\n message = \"Campaign remove successfully.\"\r\n else:\r\n mes = \"This campaign_id is not related to current login user.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"campaign_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Your user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n return message\r\n"
},
{
"alpha_fraction": 0.7070422768592834,
"alphanum_fraction": 0.7070422768592834,
"avg_line_length": 45.46666717529297,
"blob_id": "7587a739d9694fc986a2591661c034ead48b68c3",
"content_id": "49818d0a222e024093288997e13e0a35b5916310",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 710,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 15,
"path": "/manage_campus/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\n\r\nurlpatterns= [\r\n path('add-campaign', views.AddCampaignView.as_view()),\r\n path('get-all-campaign', views.GetAllCampaignView.as_view()),\r\n path('get-campaign-by-id', views.GetCampaignByIdView.as_view()),\r\n path('remove-campaign-by-id', views.RemoveCampaignByIdView.as_view()),\r\n path('remove-email-from-campaign-by-id', views.RemoveEmailByCampaignIdView.as_view()),\r\n path('add-emails-in-campaign', views.AddCampaignEmailView.as_view()),\r\n path('get-emails-by-campaign', views.GetEmailByIdView.as_view()),\r\n path('send-emaills', views.SendEmailsView.as_view()),\r\n path('upload-image-get-url', views.UploadImageView.as_view()),\r\n\r\n]"
},
{
"alpha_fraction": 0.6498696804046631,
"alphanum_fraction": 0.6602953672409058,
"avg_line_length": 25.159090042114258,
"blob_id": "0799a4a706a029fc6ebb968adc40d3f0de01a76e",
"content_id": "fd8c689aaaabeca215979276c3fdcbc9c8297653",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1151,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 44,
"path": "/manage_pricing/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\nimport django\n\n# Create your models here.\nPACKAGE_CHOICES = (\n ('S','Start'),\n ('B', 'Business'),\n ('P', 'Professional'),\n ('M', 'Max'),\n)\n\n\nDURATION_CHOICES = (\n ('D','Days'),\n ('M', 'Month'),\n ('Y', 'Year'),\n)\n\n\nclass DfPackageName(models.Model):\n name = models.CharField(max_length=120,unique=True)\n keyword = models.CharField(max_length=120,unique=True)\n\n def __str__(self):\n return self.name\n\n class Meta:\n verbose_name_plural = \"DF Package\"\n\nclass DfPrice(models.Model):\n Package_Type = models.OneToOneField(DfPackageName, on_delete=models.SET_NULL,unique=True,null=True)\n Price = models.FloatField(default=0)\n Duration_Type = models.CharField(max_length=120,choices=DURATION_CHOICES,null=True, blank=True)\n Duration_time = models.IntegerField(default=0)\n Start = models.BooleanField(default=True)\n Orders_set = models.IntegerField(default=0,unique=True)\n Create_Date = models.DateTimeField(default=django.utils.timezone.now)\n\n\n def __str__(self):\n return str(self.Package_Type)\n\n class Meta:\n verbose_name_plural = \"DF Price\"\n"
},
{
"alpha_fraction": 0.5637462139129639,
"alphanum_fraction": 0.5981873273849487,
"avg_line_length": 43.97222137451172,
"blob_id": "204c284aebcdfa2a17e690a3d505a56b50459c0f",
"content_id": "a424fdd826a805b3bc9332acc904a8984cbcd8ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1655,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 36,
"path": "/reviews/migrations/0001_initial.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-20 09:19\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport django.utils.timezone\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n initial = True\r\n\r\n dependencies = [\r\n ('manage_locations', '0009_auto_20200413_1648'),\r\n ('accounts', '0005_auto_20200410_1503'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='DfLocationReviews',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('Social_Plateform', models.CharField(blank=True, max_length=50, null=True)),\r\n ('User_Name', models.CharField(blank=True, max_length=50, null=True)),\r\n ('Reating', models.CharField(blank=True, max_length=50, null=True)),\r\n ('Review', models.TextField(blank=True, null=True)),\r\n ('User_Image_URL', models.TextField(blank=True, max_length=50, null=True)),\r\n ('Review_dateTime', models.CharField(blank=True, max_length=50, null=True)),\r\n ('Craete_Date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('Business_Location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_locations.DfBusinessLocation')),\r\n ('User', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\r\n ],\r\n options={\r\n 'verbose_name_plural': 'DF Business Reviews',\r\n },\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.7158160209655762,
"alphanum_fraction": 0.7233774662017822,
"avg_line_length": 35.906978607177734,
"blob_id": "7c832cefb15dba63383cf66e8ca890e9189e1043",
"content_id": "364fd991fb7facdc21d31a6bc5c37762f9b94d16",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1587,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 43,
"path": "/manage_jobs/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\nimport django\nfrom autoslug import AutoSlugField\nfrom datetime import date\n# Create your models here.\n\nclass DfJobCategory(models.Model):\n CategoryName = models.CharField(max_length=120,unique=True)\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\n\n def __str__(self):\n return self.CategoryName\n\n class Meta:\n verbose_name_plural = \"DF Job Category\"\n\nclass DfJobs(models.Model):\n Category_name = models.ForeignKey(DfJobCategory, on_delete=models.SET_NULL, null=True, blank=True,related_name='AwWineType_DfJobs')\n Job_Title = models.CharField(max_length=120)\n Job_slug = AutoSlugField(populate_from='Job_Title', always_update=True, unique_with='Create_date__month',null=True, blank=True)\n Job_Description = models.TextField()\n Create_date = models.DateTimeField(default=django.utils.timezone.now)\n\n\n def __str__(self):\n return self.Job_Title\n\n class Meta:\n verbose_name_plural = \"DF Jobs\"\n\nclass DfJobApplaicationSet(models.Model):\n Job_title = models.ForeignKey(DfJobs, on_delete=models.CASCADE, related_name='Job_DfJobs')\n job_cate = models.ForeignKey(DfJobCategory, on_delete=models.CASCADE,related_name='AwWineType_DfApplay' )\n Name = models.CharField(max_length=120)\n email = models.EmailField(max_length=120)\n contact_no = models.BigIntegerField()\n Application_Date = models.DateTimeField(default=django.utils.timezone.now)\n\n def __str__(self):\n return str(self.Job_title)\n\n class Meta:\n verbose_name_plural = \"DF Job Applaication\"\n"
},
{
"alpha_fraction": 0.6054243445396423,
"alphanum_fraction": 0.6255468130111694,
"avg_line_length": 36.099998474121094,
"blob_id": "f70d9ca37fad1faa70fa62f32d4cecc9c6d585e8",
"content_id": "1a4a1feb7e8b2587426a0963ed6b9e73410d3abd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1143,
"license_type": "no_license",
"max_line_length": 152,
"num_lines": 30,
"path": "/manage_locations/migrations/0002_auto_20200410_1653.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-10 11:23\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('manage_dropdown_value', '0002_delete_dfcity'),\r\n ('manage_locations', '0001_initial'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='dfbusinesslocation',\r\n name='Business_catugory',\r\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_dropdown_value.DfBusinessCategory'),\r\n ),\r\n migrations.AlterField(\r\n model_name='dfbusinesslocation',\r\n name='Country',\r\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_dropdown_value.DfCountry'),\r\n ),\r\n migrations.AlterField(\r\n model_name='dfbusinesslocation',\r\n name='State',\r\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_dropdown_value.DfState'),\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.712905764579773,
"alphanum_fraction": 0.7225652933120728,
"avg_line_length": 53.90265655517578,
"blob_id": "201bee52aea91179fc08c36f8c8c1f8d5843e18c",
"content_id": "83cc057dff5be419e04b7e9e2bb5421617c6341e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6315,
"license_type": "no_license",
"max_line_length": 150,
"num_lines": 113,
"path": "/manage_locations/models.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.db import models\r\nfrom accounts.models import DfUser\r\nimport django\r\nfrom datetime import date\r\nfrom manage_dropdown_value.models import DfBusinessCategory,DfCountry,DfState\r\nfrom social_media_platforms.models import DfSocialMedia\r\n# Create your models here.\r\ndef user_directory_path(instance, filename):\r\n project_id_in_list = instance.Store_Code.split(\" \")\r\n today_date = date.today()\r\n project_id_in_string = '_'.join([str(elem) for elem in project_id_in_list])\r\n return '{0}/{1}'.format(project_id_in_string+\"/locations/logo/\"+str(today_date.year)+\"/\"+str(today_date.month)+\"/\"+str(today_date.day),filename)\r\n\r\ndef user_directory_path_for_banner(instance, filename):\r\n project_id_in_list = instance.Store_Code.split(\" \")\r\n today_date = date.today()\r\n project_id_in_string = '_'.join([str(elem) for elem in project_id_in_list])\r\n return '{0}/{1}'.format(project_id_in_string+\"/locations/banner/\"+str(today_date.year)+\"/\"+str(today_date.month)+\"/\"+str(today_date.day),filename)\r\n\r\nclass DfBusinessLocation(models.Model):\r\n DfUser = models.ForeignKey(DfUser, on_delete=models.SET_NULL, null=True, blank=True)\r\n Store_Code = models.CharField(max_length=50)\r\n Business_Logo = models.ImageField(upload_to=user_directory_path,null=True,blank=True)\r\n Location_name = models.CharField(max_length=120,null=True,blank=True)\r\n Business_category = models.ForeignKey(DfBusinessCategory, on_delete=models.SET_NULL, null=True, blank=True)\r\n Additional_catugory = models.TextField(null=True,blank=True)\r\n Address_1 = models.TextField(max_length=120,null=True,blank=True)\r\n Address_2 = models.TextField(max_length=120,null=True,blank=True)\r\n Country = models.ForeignKey(DfCountry, on_delete=models.SET_NULL, null=True, blank=True)\r\n State = models.ForeignKey(DfState, on_delete=models.SET_NULL, null=True, blank=True)\r\n City = models.CharField(max_length=120, null=True, blank=True)\r\n Zipcode = 
models.IntegerField(null=True, blank=True)\r\n Phone_no = models.IntegerField(null=True , blank=True)\r\n Website = models.URLField(null=True,blank=True)\r\n Franchise_Location = models.BooleanField(default=True)\r\n Do_not_publish_my_address = models.BooleanField(default=True)\r\n Business_Owner_Name = models.CharField(max_length=120,null=True,blank=True)\r\n Owner_Email = models.EmailField(null=True,blank=True)\r\n Business_Tagline = models.CharField(max_length=120,null=True,blank=True)\r\n Year_Of_Incorporation = models.IntegerField()\r\n About_Business = models.TextField(null=True,blank=True)\r\n Facebook_Profile = models.CharField(max_length=120,null=True,blank=True)\r\n Instagram_Profile = models.CharField(max_length=120,null=True,blank=True)\r\n Twitter_Profile = models.CharField(max_length=120,null=True,blank=True)\r\n Business_Cover_Image = models.ImageField(upload_to=user_directory_path_for_banner)\r\n Craete_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n Update_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n def __str__(self):\r\n return self.Store_Code\r\n class Meta:\r\n verbose_name_plural = \"DF Business Location\"\r\n\r\n\r\ndef user_directory_path_for_other_image(instance, filename):\r\n project_id_in_list = instance.Business_Location.Store_Code.split(\" \")\r\n project_id_in_string = '_'.join([str(elem) for elem in project_id_in_list])\r\n today_date = date.today()\r\n return '{0}/{1}'.format(project_id_in_string+\"/locations/other/\"+str(today_date.year)+\"/\"+str(today_date.month)+\"/\"+str(today_date.day),filename)\r\n\r\nclass DfLocationImage(models.Model):\r\n Business_Location = models.ForeignKey(DfBusinessLocation, related_name=\"Df_location_image\", on_delete=models.SET_NULL, null=True, blank=True)\r\n Image = models.ImageField(upload_to=user_directory_path_for_other_image)\r\n Craete_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n Update_Date = 
models.DateTimeField(default=django.utils.timezone.now)\r\n def __str__(self):\r\n return str(self.Image)\r\n class Meta:\r\n verbose_name_plural = \"DF Location Other Image\"\r\n\r\n\r\n\r\nclass DfLocationOpenHours(models.Model):\r\n Business_Location = models.ForeignKey(DfBusinessLocation, related_name=\"Df_location_poen_hour\",on_delete=models.SET_NULL, null=True, blank=True)\r\n date = models.CharField(max_length=20,null=True,blank=True)\r\n Day = models.CharField(max_length=20,null=True,blank=True)\r\n Type = models.CharField(max_length=20,null=True,blank=True)\r\n Open_status = models.CharField(max_length=20,null=True,blank=True)\r\n start_time_1 = models.CharField(max_length=20,null=True,blank=True)\r\n end_time_1 = models.CharField(max_length=20,null=True,blank=True)\r\n start_time_2 = models.CharField(max_length=20,null=True,blank=True)\r\n end_time_2 = models.CharField(max_length=20,null=True,blank=True)\r\n Update_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n def __str__(self):\r\n return str(self.Day)\r\n class Meta:\r\n verbose_name_plural = \"DF Location Open Hours\"\r\n\r\n\r\n\r\nclass DfLocationPaymentMethod(models.Model):\r\n Business_Location = models.ForeignKey(DfBusinessLocation, related_name=\"Df_location_payments\",on_delete=models.SET_NULL, null=True, blank=True)\r\n Payment_Method = models.CharField(max_length=20, null=True,blank=True)\r\n\r\n def __str__(self):\r\n return str(self.Payment_Method)\r\n\r\n class Meta:\r\n verbose_name_plural = \"DF Location Payment Method\"\r\n\r\n\r\nclass DfLocationConnectPlatform(models.Model):\r\n DfUser = models.ForeignKey(DfUser, on_delete=models.SET_NULL, null=True, blank=True)\r\n Business_Location = models.ForeignKey(DfBusinessLocation, related_name=\"Df_location_connectWith\",on_delete=models.SET_NULL, null=True, blank=True)\r\n Social_Platform = models.ForeignKey(DfSocialMedia, related_name=\"Df_location_connectWith\",on_delete=models.SET_NULL, null=True, blank=True)\r\n 
Connection_Status = models.CharField(max_length=20,null=True,blank=True)\r\n Craete_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n Update_Date = models.DateTimeField(default=django.utils.timezone.now)\r\n\r\n def __str__(self):\r\n return str(self.DfUser)\r\n class Meta:\r\n verbose_name_plural = \"DF Business_Location Connect With Social Media\""
},
{
"alpha_fraction": 0.5221579670906067,
"alphanum_fraction": 0.5876685976982117,
"avg_line_length": 27.83333396911621,
"blob_id": "c64b86b7f3d3de5837e9e0f5b2206c5c61b9844a",
"content_id": "74fa7b0592190bd1caa62f3903931d8cff0f163c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 519,
"license_type": "no_license",
"max_line_length": 169,
"num_lines": 18,
"path": "/manage_pricing/migrations/0006_auto_20200922_1207.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 12:07\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0005_auto_20200922_1205'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfprice',\n name='Package_Type',\n field=models.CharField(blank=True, choices=[('S', 'Start'), ('B', 'Business'), ('P', 'Professional'), ('M', 'Max')], max_length=120, null=True, unique=True),\n ),\n ]\n"
},
{
"alpha_fraction": 0.8026315569877625,
"alphanum_fraction": 0.8026315569877625,
"avg_line_length": 37.125,
"blob_id": "de52095604d55afa721bd7f66081c172d6f0f171",
"content_id": "3f7acaba84a8c63a63aa447970525b96a6914b4b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 304,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 8,
"path": "/manage_bloges/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom import_export.admin import ImportExportModelAdmin\nfrom .models import DfBlogs\n# Register your models here.\n\nclass DfBlogsAdmin(ImportExportModelAdmin):\n list_display = ('Blog_Title','Blog_slug','Blog_Image','Create_date')\nadmin.site.register(DfBlogs,DfBlogsAdmin)"
},
{
"alpha_fraction": 0.7430278658866882,
"alphanum_fraction": 0.7430278658866882,
"avg_line_length": 40,
"blob_id": "fd3d70c4f7e84c37e7adfc96f184b18328bc6397",
"content_id": "9f6ec19f5a595e039f9838e1e3fb3ba72d34bb02",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 502,
"license_type": "no_license",
"max_line_length": 138,
"num_lines": 12,
"path": "/social_media_platforms/admin.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\r\nfrom .models import DfSocialMedia\r\nfrom import_export.admin import ImportExportModelAdmin\r\n# Register your models here.\r\n\r\nclass DfSocialMediaAdmin(ImportExportModelAdmin):\r\n search_fields = ['Platform']\r\n list_display = ('DfUser','Platform','Token','Username','Email','Password','Connect_status','Other_info','Craete_Date','Update_Date')\r\n list_filter = ('Connect_status','DfUser','Craete_Date',)\r\n\r\n\r\nadmin.site.register(DfSocialMedia, DfSocialMediaAdmin)"
},
{
"alpha_fraction": 0.5475924015045166,
"alphanum_fraction": 0.5901455879211426,
"avg_line_length": 34.720001220703125,
"blob_id": "e3afbf7c8b9e420305ed3931bc3556a39a8406ce",
"content_id": "6bdfd096b8d2b41aaffba8768fa580f0ae84bf39",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 893,
"license_type": "no_license",
"max_line_length": 216,
"num_lines": 25,
"path": "/manage_orders_and_payments/migrations/0002_auto_20200922_1152.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-22 11:52\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_pricing', '0004_auto_20200922_1146'),\n ('manage_orders_and_payments', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dforders',\n name='Duration_Type',\n field=models.CharField(blank=True, choices=[('D', 'Days'), ('M', 'Month'), ('Y', 'Year')], max_length=120, null=True),\n ),\n migrations.AlterField(\n model_name='dforders',\n name='Package',\n field=models.ForeignKey(blank=True, choices=[('S', 'Start'), ('B', 'Business'), ('P', 'Professional'), ('M', 'Max')], null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_pricing.DfPrice'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.6569343209266663,
"alphanum_fraction": 0.6605839133262634,
"avg_line_length": 32.25,
"blob_id": "a63047261fbf0466251f9d420edb0bd9ad49f475",
"content_id": "0cff18c994748389314932e35fd870f866256125",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 274,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 8,
"path": "/manage_faqs/serializers.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom .models import DfFaqs\r\n\r\nclass DfFaqsSerializer(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfFaqs\r\n fields = ['id','Category', 'Question', 'Question_slug', 'Ansews', 'Create_date']\r\n depth = 2\r\n"
},
{
"alpha_fraction": 0.5706806182861328,
"alphanum_fraction": 0.5895287990570068,
"avg_line_length": 31.931034088134766,
"blob_id": "e50b6776a0323d090bcd53e2da0de882d4aa7ecf",
"content_id": "044c945b5a5615b1dc4346f935d9bf6a870a92d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 955,
"license_type": "no_license",
"max_line_length": 187,
"num_lines": 29,
"path": "/manage_faqs/migrations/0001_initial.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-10 10:34\n\nimport autoslug.fields\nfrom django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfFaqs',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Question', models.CharField(max_length=120)),\n ('Question_slug', autoslug.fields.AutoSlugField(always_update=True, blank=True, editable=False, null=True, populate_from='Question', unique_with=('Create_date__month',))),\n ('Ansews', models.TextField()),\n ('Create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ],\n options={\n 'verbose_name_plural': 'DF Faqs',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.7622950673103333,
"alphanum_fraction": 0.7622950673103333,
"avg_line_length": 22.399999618530273,
"blob_id": "6c935168cc806269f9289e8429781ceceb562dc9",
"content_id": "0744443569eeaae17d1c19202b42b80413b65e9c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 122,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 5,
"path": "/social_media_platforms/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\r\n\r\n\r\nclass SocialMediaPlatformsConfig(AppConfig):\r\n name = 'social_media_platforms'\r\n"
},
{
"alpha_fraction": 0.6751269102096558,
"alphanum_fraction": 0.6751269102096558,
"avg_line_length": 26.428571701049805,
"blob_id": "0c9ced198a274eb6d6c5707a8e8c080377710162",
"content_id": "d20539be0a6ae9b641e8f96bb5fa2538a36d9e9f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 197,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 7,
"path": "/reviews/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\n\r\nurlpatterns = [\r\n path('save-review', views.SaveReviewsView.as_view()),\r\n path('get-all-review', views.GetAllReviewView.as_view()),\r\n ]"
},
{
"alpha_fraction": 0.688715934753418,
"alphanum_fraction": 0.688715934753418,
"avg_line_length": 35,
"blob_id": "e99c2c608f09532f319996b9cfeb92c6ec2e26ae",
"content_id": "e0e76eea02ef0e801bb0c2c0e03b04926c4651bc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 257,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 7,
"path": "/manage_bloges/serializers.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom .models import DfBlogs\r\n\r\nclass DfBlogsSerializer(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfBlogs\r\n fields = ['id', 'Blog_Title', 'Blog_slug', 'Blog_Image', 'Message','Create_date']"
},
{
"alpha_fraction": 0.5170068144798279,
"alphanum_fraction": 0.5873016119003296,
"avg_line_length": 22.5,
"blob_id": "4522972574f93d29186b03e843380366e8436e34",
"content_id": "08b4d2a42767861807c92da05e4ea0da5ef3c715",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 441,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 18,
"path": "/manage_locations/migrations/0003_auto_20200410_1701.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-10 11:31\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('manage_locations', '0002_auto_20200410_1653'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='dfbusinesslocation',\r\n name='About_Business',\r\n field=models.TextField(blank=True, null=True),\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.6787148714065552,
"alphanum_fraction": 0.6787148714065552,
"avg_line_length": 23.100000381469727,
"blob_id": "d77cedeaf3657332afd3111c760c326d6f5c205a",
"content_id": "5eb02b0ab40228e8aee1c4dba1c6bbdd34657d47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 249,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 10,
"path": "/manage_dropdown_value/urls.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.urls import path\r\nfrom . import views\r\n\r\n\r\nurlpatterns = [\r\n path('business-categoryes', views.BusinessCategoryesView.as_view()),\r\n path('counrty', views.CounrtyView.as_view()),\r\n path('states', views.StatesView.as_view()),\r\n\r\n]"
},
{
"alpha_fraction": 0.5821868181228638,
"alphanum_fraction": 0.6002896428108215,
"avg_line_length": 39.617645263671875,
"blob_id": "7633a5622398049688e7aa8a16a7a1e9844ffd24",
"content_id": "2cc7a997b39d2e2f76a32466e71cc30a5087b92f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1381,
"license_type": "no_license",
"max_line_length": 189,
"num_lines": 34,
"path": "/manage_jobs/migrations/0002_auto_20200919_1201.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-09-19 12:01\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_jobs', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterModelOptions(\n name='dfjobs',\n options={'verbose_name_plural': 'DF Jobs'},\n ),\n migrations.CreateModel(\n name='DfJobApplaication',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Name', models.CharField(max_length=120)),\n ('email', models.EmailField(max_length=120)),\n ('contact_no', models.BigIntegerField()),\n ('Application_Date', models.DateTimeField(default=django.utils.timezone.now)),\n ('Job', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Job_DfJobs', to='manage_jobs.DfJobs')),\n ('job_category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='AwWineType_DfApplay', to='manage_jobs.DfJobCategory')),\n ],\n options={\n 'verbose_name_plural': 'DF Job Applaication',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.7477477192878723,
"alphanum_fraction": 0.7477477192878723,
"avg_line_length": 20.200000762939453,
"blob_id": "6cceda60a020eefb838b0485432f45febb266fa5",
"content_id": "b55408e6c8d9ca9965657234c625cf7a26c23f78",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 111,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 5,
"path": "/manage_locations/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\r\n\r\n\r\nclass ManageLocationsConfig(AppConfig):\r\n name = 'manage_locations'\r\n"
},
{
"alpha_fraction": 0.7209658622741699,
"alphanum_fraction": 0.7255653738975525,
"avg_line_length": 47.26415252685547,
"blob_id": "e5da108c490b418f3f8b2f857705ce5f54de008e",
"content_id": "091a1bfd4b8b5866014ef7866e1a22a632c1f11f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2609,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 53,
"path": "/manage_dropdown_value/views.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\r\nfrom rest_framework.views import APIView\r\nfrom rest_framework.response import Response\r\nfrom rest_framework.permissions import IsAuthenticated\r\nfrom rest_framework.authentication import TokenAuthentication,SessionAuthentication,BasicAuthentication\r\nfrom .models import DfBusinessCategory,DfCountry,DfState\r\nfrom .serializear import DfBusinessCategorySerializers,DfCountrySerializers,DfStateSerializers\r\n# Create your views here.\r\n\r\nclass BusinessCategoryesView(APIView):\r\n authentication_classes = (TokenAuthentication,SessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n\r\n def get(self,request):\r\n businessCategory = {}\r\n if DfBusinessCategory.objects.filter(Status=True).exists():\r\n businessCategory = DfBusinessCategory.objects.filter(Status=True)\r\n businessCategory_si = DfBusinessCategorySerializers(businessCategory,many=True)\r\n businessCategory = businessCategory_si.data\r\n return Response({'BusinessCategory':businessCategory},status=200)\r\n\r\nclass CounrtyView(APIView):\r\n authentication_classes = (TokenAuthentication,SessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n def get(self,request):\r\n dfCountry = {}\r\n if DfCountry.objects.filter(Status=True).exists():\r\n dfCountry = DfCountry.objects.filter(Status=True)\r\n dfCountry_si = DfCountrySerializers(dfCountry,many=True)\r\n dfCountry = dfCountry_si.data\r\n return Response({'counrty':dfCountry},status=200)\r\n\r\nclass CounrtyView(APIView):\r\n authentication_classes = (TokenAuthentication,SessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n def get(self,request):\r\n dfCountry = {}\r\n if DfCountry.objects.filter(Status=True).exists():\r\n dfCountry = DfCountry.objects.filter(Status=True)\r\n dfCountry_si = DfCountrySerializers(dfCountry,many=True)\r\n dfCountry = dfCountry_si.data\r\n return 
Response({'counrty':dfCountry},status=200)\r\n\r\nclass StatesView(APIView):\r\n authentication_classes = (TokenAuthentication,SessionAuthentication,BasicAuthentication,)\r\n permission_classes = [IsAuthenticated]\r\n def get(self,request):\r\n dfState = {}\r\n if DfState.objects.filter(Status=True).exists():\r\n dfState = DfState.objects.filter(Status=True)\r\n dfState_si = DfStateSerializers(dfState,many=True)\r\n dfState = dfState_si.data\r\n return Response({'status':dfState},status=200)"
},
{
"alpha_fraction": 0.7699999809265137,
"alphanum_fraction": 0.7699999809265137,
"avg_line_length": 19,
"blob_id": "deacb0183b6a7c1537499fdad8144b710b5a3bd7",
"content_id": "cd94011590f60d2f1d7ee03b886d3f70c296aca9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 100,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 5,
"path": "/manage_campus/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass ManageCampusConfig(AppConfig):\n name = 'manage_campus'\n"
},
{
"alpha_fraction": 0.7583333253860474,
"alphanum_fraction": 0.7583333253860474,
"avg_line_length": 22,
"blob_id": "c6438b67b63d4a3604748a73388b34403b43d976",
"content_id": "b993edd8eafa9c87126176a3f85ffdc357e24dfb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 120,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 5,
"path": "/manage_dropdown_value/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\r\n\r\n\r\nclass ManageDropdownValueConfig(AppConfig):\r\n name = 'manage_dropdown_value'\r\n"
},
{
"alpha_fraction": 0.6990787982940674,
"alphanum_fraction": 0.7011259198188782,
"avg_line_length": 42.5,
"blob_id": "7599a27a7bc525984e9c878646f7334281b6460c",
"content_id": "0f61830e020e8e4635e2e4163faaf233837dea9c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 977,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 22,
"path": "/manage_jobs/serializers.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom .models import DfJobCategory,DfJobs,DfJobApplaicationSet\r\n\r\nclass DfJobCategorySerializer(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfJobCategory\r\n fields = ['id', 'CategoryName', 'Create_date']\r\n\r\nclass DfJobsSerializer(serializers.ModelSerializer):\r\n class Meta:\r\n model = DfJobs\r\n fields = ['id', 'Category_name', 'Job_Title','Job_slug','Job_Description','Create_date']\r\n depth = 2\r\n\r\nclass DfJobsApplicationSerializer(serializers.ModelSerializer):\r\n # Job_DfJobs = DfJobsSerializer(read_only=True,many=True)\r\n Job_title = serializers.PrimaryKeyRelatedField(many=False,queryset=DfJobs.objects.all())\r\n job_cate = serializers.PrimaryKeyRelatedField(many=False,queryset=DfJobCategory.objects.all())\r\n class Meta:\r\n model = DfJobApplaicationSet\r\n fields = ['id', 'Job_title', 'job_cate','Name','email','contact_no','Application_Date']\r\n depth = 2"
},
{
"alpha_fraction": 0.5461604595184326,
"alphanum_fraction": 0.5729076862335205,
"avg_line_length": 27.268293380737305,
"blob_id": "79b4f8099762ad201bf1996e1091f088014afd48",
"content_id": "9b692df25d4a685d5025651489b7b5507655e524",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1159,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 41,
"path": "/manage_locations/migrations/0010_auto_20200416_0910.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-16 09:10\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0009_auto_20200413_1648'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='dfbusinesslocation',\n name='Operating_Hours_Friday',\n ),\n migrations.RemoveField(\n model_name='dfbusinesslocation',\n name='Operating_Hours_Monday',\n ),\n migrations.RemoveField(\n model_name='dfbusinesslocation',\n name='Operating_Hours_Saturday',\n ),\n migrations.RemoveField(\n model_name='dfbusinesslocation',\n name='Operating_Hours_Sunday',\n ),\n migrations.RemoveField(\n model_name='dfbusinesslocation',\n name='Operating_Hours_Thursday',\n ),\n migrations.RemoveField(\n model_name='dfbusinesslocation',\n name='Operating_Hours_Tuseday',\n ),\n migrations.RemoveField(\n model_name='dfbusinesslocation',\n name='Operating_Hours_Wednesday',\n ),\n ]\n"
},
{
"alpha_fraction": 0.6206225752830505,
"alphanum_fraction": 0.6575875282287598,
"avg_line_length": 26.052631378173828,
"blob_id": "974f5fa0f23858e715a632b9045c4f44aa09c42e",
"content_id": "c2d3268274ce31ecb28dd55d75fb6f8da4aaa642",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 514,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 19,
"path": "/manage_locations/migrations/0017_auto_20200420_1102.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-20 11:02\n\nfrom django.db import migrations, models\nimport manage_locations.models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('manage_locations', '0016_dflocationopenhours_date'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dfbusinesslocation',\n name='Business_Logo',\n field=models.FileField(blank=True, null=True, upload_to=manage_locations.models.user_directory_path),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5641665458679199,
"alphanum_fraction": 0.5675970911979675,
"avg_line_length": 40.19078826904297,
"blob_id": "bd330bb315742c156a0180f2223d33b8aab2698a",
"content_id": "05f6d32667b47eb549df8d9d6a4e226318b4cdba",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6413,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 152,
"path": "/manage_voice_faqs/serializear.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\r\nfrom django.shortcuts import get_object_or_404\r\nfrom .models import DfVoiceFaqs\r\nfrom rest_framework import exceptions\r\nfrom rest_framework.response import Response\r\nfrom accounts.models import DfUser\r\nfrom manage_locations.models import DfBusinessLocation\r\nfrom django.db.models import Q\r\n\r\n\r\n\r\n\r\nclass AddVoiceFaqs(serializers.Serializer):\r\n DfUser = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Location = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n question = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n answer = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n\r\n class Meta:\r\n model = DfVoiceFaqs\r\n fields = ['DfUser','Location','question','answer','Craete_Date']\r\n\r\nclass GetAllFaqSerializersValidate(serializers.Serializer):\r\n user_id = serializers.CharField()\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n get_user_instance = {}\r\n if user_id:\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n get_user_instance = get_object_or_404(DfUser, user_id=user_id)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide user_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return get_user_instance\r\n\r\nclass GetAllFaqSerializersLocationValidate(serializers.Serializer):\r\n user_id = serializers.CharField()\r\n location_id = serializers.CharField()\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n location_id = data.get(\"location_id\", \"\")\r\n data_set = {}\r\n get_user_instance = {}\r\n if user_id:\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n data_set[\"get_user_instance\"] = get_object_or_404(DfUser, user_id=user_id)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n 
else:\r\n mes = \"Must provide user_id.\"\r\n raise exceptions.ValidationError(mes)\r\n if location_id:\r\n if DfBusinessLocation.objects.filter(id=location_id).exists():\r\n data_set[\"get_location_instance\"] = get_object_or_404(DfBusinessLocation, id=location_id)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide location_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return data_set\r\n\r\n\r\n\r\nclass GetAllFaqSerializersByIdValidate(serializers.Serializer):\r\n user_id = serializers.CharField()\r\n faq_id = serializers.CharField()\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n faq_id = data.get(\"faq_id\", \"\")\r\n data_set = {}\r\n get_user_instance = {}\r\n if user_id:\r\n if DfUser.objects.filter(user_id=user_id).exists():\r\n data_set[\"get_user_instance\"] = get_object_or_404(DfUser, user_id=user_id)\r\n else:\r\n mes = \"user_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide user_id.\"\r\n raise exceptions.ValidationError(mes)\r\n if faq_id:\r\n if DfVoiceFaqs.objects.filter(id=faq_id).exists():\r\n data_set[\"get_faq_instance\"] = get_object_or_404(DfVoiceFaqs, id=faq_id)\r\n else:\r\n mes = \"faq_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Must provide faq_id.\"\r\n raise exceptions.ValidationError(mes)\r\n return data_set\r\n\r\n\r\n\r\n\r\n# ====================EditLocationBusinessSerializers ========\r\n\r\nclass EditFaqSerializers(serializers.Serializer):\r\n user_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n Location_id = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n question = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n answer = serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n faq_id = 
serializers.CharField(style={\"inpupt_type\": \"text\"}, write_only=True)\r\n def validate(self, data):\r\n user_id = data.get(\"user_id\", \"\")\r\n Location_id = data.get(\"Location_id\", \"\")\r\n question = data.get(\"question\", \"\")\r\n answer = data.get(\"answer\", \"\")\r\n faq_id = data.get(\"faq_id\", \"\")\r\n if Location_id:\r\n if DfBusinessLocation.objects.filter(id=Location_id).exists():\r\n location_ins = get_object_or_404(DfBusinessLocation,id=Location_id)\r\n if faq_id:\r\n if DfVoiceFaqs.objects.filter(id=faq_id).exists():\r\n if DfVoiceFaqs.objects.filter(DfUser__id=user_id).filter(Location=location_ins).filter(~Q(id =faq_id)).exists():\r\n msg = \"This question is alerady exists.\"\r\n raise exceptions.ValidationError(msg)\r\n else:\r\n DfVoiceFaqs.objects.filter(id=faq_id).update(\r\n Location=location_ins,\r\n question=question,\r\n answer=answer\r\n )\r\n update_info = \"Faq info update successfully\"\r\n else:\r\n mes = \"faq_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"faq_id provide location_id.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"location_id is incorrect.\"\r\n raise exceptions.ValidationError(mes)\r\n else:\r\n mes = \"Location_id provide location_id.\"\r\n raise exceptions.ValidationError(mes)\r\n\r\n return update_info\r\n# ====================EditLocationBusinessSerializers ========\r\n\r\n\r\nclass GetAllFaqSerializers(serializers.ModelSerializer):\r\n\r\n\r\n class Meta:\r\n model = DfVoiceFaqs\r\n fields = ['id','DfUser', 'Location', 'question', 'answer', 'Craete_Date']\r\n depth = 1\r\n"
},
{
"alpha_fraction": 0.7528089880943298,
"alphanum_fraction": 0.7528089880943298,
"avg_line_length": 16.799999237060547,
"blob_id": "75e291fa5130341abcdf053c5c96295bbec8fd52",
"content_id": "fd529f51cfe542b1cc6db976aebb76f1341da00b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 89,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 5,
"path": "/queryes/apps.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass QueryesConfig(AppConfig):\n name = 'queryes'\n"
},
{
"alpha_fraction": 0.5655399560928345,
"alphanum_fraction": 0.6067600846290588,
"avg_line_length": 36.90625,
"blob_id": "472576f2deb9347cbbdfd5caa35c7051993d4d6a",
"content_id": "efc749906ca0b67a000a04b4fb411f283adf345a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1213,
"license_type": "no_license",
"max_line_length": 159,
"num_lines": 32,
"path": "/manage_voice_faqs/migrations/0001_initial.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-07-28 12:31\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('manage_locations', '0019_auto_20200420_1405'),\n ('accounts', '0005_auto_20200410_1503'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DfVoiceFaqs',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('question', models.CharField(max_length=500)),\n ('answer', models.TextField(blank=True, null=True)),\n ('Craete_Date', models.DateTimeField(default=django.utils.timezone.now)),\n ('DfUser', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.DfUser')),\n ('Location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='manage_locations.DfBusinessLocation')),\n ],\n options={\n 'verbose_name_plural': 'DF Voice Faqs',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.49614396691322327,
"alphanum_fraction": 0.544987142086029,
"avg_line_length": 19.61111068725586,
"blob_id": "fa8b47846cf416dffdb46a1748fd2e79748d1c59",
"content_id": "0ef16bb58d32618026945e47ae41cb397e2ba493",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 389,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 18,
"path": "/accounts/migrations/0003_dfuser_address.py",
"repo_name": "Deepaksinghpatel052/dashify",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.4 on 2020-04-09 18:01\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('accounts', '0002_dfuser'),\r\n ]\r\n\r\n operations = [\r\n migrations.AddField(\r\n model_name='dfuser',\r\n name='Address',\r\n field=models.TextField(default=''),\r\n ),\r\n ]\r\n"
}
] | 137 |
jaredWu0805/CloudXR_server | https://github.com/jaredWu0805/CloudXR_server | 34a527f72fb24a614fd38c52503ed359ea44e292 | 2e94876211158b40dc17162498cb9bde0a729acb | 0d57497e532a39db3f7eedc857c7f50873a84897 | refs/heads/master | 2023-02-10T19:32:13.084795 | 2021-01-12T06:12:39 | 2021-01-12T06:12:39 | 316,393,337 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5892334580421448,
"alphanum_fraction": 0.6085429787635803,
"avg_line_length": 26.564516067504883,
"blob_id": "1cfd3c8a61631967815a75edd6cfc5546b894b79",
"content_id": "afc85f68daa406f493d36b61923ec0f8075d772e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3418,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 124,
"path": "/app.py",
"repo_name": "jaredWu0805/CloudXR_server",
"src_encoding": "UTF-8",
"text": "import os\nimport atexit\nfrom requests import get, post, exceptions\nfrom flask import Flask, request\nimport time\nfrom threading import Timer\n\n\napp = Flask(__name__)\n\nsteamVR_gameid = '250820'\nhellblade = '747350'\nredout = '519880'\nlaunch_cmd = 'start steam://rungameid/{0}'\nclose_cmd = 'taskkill /IM \"{0}\" /F'\ngame_server_manager_ip = '172.16.0.25'\n# game_server_manager_ip = '192.168.0.106'\nis_available = 1\nplayer_ip = ''\nlaunch_time = time.time()\ncurrent_game_id = ''\n\n\n# Register to manager\ndef register_server():\n try:\n req_data = {\n 'games': [hellblade, redout]\n }\n print(req_data)\n r = post('http://{0}:5000/register'.format(game_server_manager_ip), data=req_data)\n except exceptions.RequestException as e:\n raise SystemExit(e)\n\n\ndef unregistered():\n res = get('http://{0}:5000/deregister'.format(game_server_manager_ip))\n print(res.text)\n\n\ndef open_game():\n os.system(launch_cmd.format(current_game_id))\n req_data = {\n 'client_ip': player_ip,\n 'game_id': current_game_id,\n 'connection_status': 'playing'\n }\n print('playing game ', req_data)\n r = post('http://{0}:5000/connection-status'.format(game_server_manager_ip), data=req_data)\n\n\ndef update_status():\n req_data = {\n 'client_ip': player_ip,\n 'game_id': current_game_id,\n 'connection_status': 'closed'\n }\n print('closed game ', req_data)\n r = post('http://{0}:5000/connection-status'.format(game_server_manager_ip), data=req_data)\n\n\nregister_server()\natexit.register(unregistered)\n\n\[email protected]('/')\ndef index():\n print('host', request.host.split(':')[0])\n print('remote_addr', request.remote_addr)\n return 'CloudXR server'\n\n\[email protected]('/game-connection', methods=['POST', 'GET'])\ndef launch_cloudxr():\n global launch_time\n if request.method == 'POST':\n global player_ip, is_available, current_game_id\n if not is_available:\n return 'Game server not available now'\n game_title = request.form.get('game_title', type=str)\n game_id = 
request.form.get('game_id', type=str)\n player_ip = request.form.get('player_ip', type=str)\n print(game_title, game_id, player_ip)\n if is_available:\n is_available = 0\n current_game_id = game_id\n t = Timer(3, open_game)\n launch_time = time.time()\n t.start()\n return {'launch success': True}\n else:\n return {'launch success': False}\n # for launching steamVR process\n else:\n if time.time() > launch_time+10:\n return {'status': True}\n else:\n return {'status': False}\n\n\[email protected]('/game-disconnection', methods=['POST'])\ndef close_clourdxr():\n global player_ip, is_available, current_game_id\n print(player_ip, request.remote_addr)\n if request.form.get('client_ip', type=str) != player_ip or request.remote_addr != game_server_manager_ip:\n return 'Invalid request'\n # Close game app and steamVR, and reset game server status\n if os.system(close_cmd.format(\"vrmonitor.exe\")) == 0:\n update_status()\n player_ip = ''\n is_available = 1\n current_game_id = ''\n return {'status': True}\n else:\n return {'status': False}\n\n\[email protected]('/stream-info')\ndef get_stream_info():\n return 'Under construction'\n\n\nif __name__ == '__main__':\n app.run()\n"
}
] | 1 |
litalamram/Web-Data-Management | https://github.com/litalamram/Web-Data-Management | db24ff1c2c206569bbd895a4bb5a1da55c46287e | 75f00e849d75473e2bfb19cc57935829cbbcf2b2 | 397e43f1cd66a96f9ccdf07e74e99e46518c7e2b | refs/heads/master | 2020-07-27T04:02:34.784030 | 2019-09-16T17:46:18 | 2019-09-16T17:46:18 | 208,860,804 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.8368794322013855,
"alphanum_fraction": 0.8368794322013855,
"avg_line_length": 69.5,
"blob_id": "93fb0d99b213e2435795ba80e76c2daf63704a9b",
"content_id": "3aee81adb054c867d11582650867d6e9dd1c7ad4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 141,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 2,
"path": "/README.md",
"repo_name": "litalamram/Web-Data-Management",
"src_encoding": "UTF-8",
"text": "# Web-Data-Management\nImplementation of a question answering system. Applying information extraction from wikipedia. Using XPATH and SPARQL.\n"
},
{
"alpha_fraction": 0.5340937376022339,
"alphanum_fraction": 0.5486337542533875,
"avg_line_length": 31.169355392456055,
"blob_id": "9de4adc9cb15526f0fcc77630e71f47360c5bfd5",
"content_id": "f483d6663ec5a95c4443fa2b0497fad2a988b362",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7978,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 248,
"path": "/geo_qa.py",
"repo_name": "litalamram/Web-Data-Management",
"src_encoding": "UTF-8",
"text": "import sys\nimport requests\nimport lxml.html\nimport rdflib\nfrom rdflib import XSD, Literal, RDF\nimport re\n\nwiki_prefix = \"http://en.wikipedia.org\"\nontology = rdflib.Graph()\n\n\ndef question_1():\n print \"pm \" + how_many_pm()\n print \"countries \" + how_many_countries()\n print \"republic \" + how_many_republic()\n print \"monarchy \" + how_many_monarchy()\n\n\ndef how_many_countries():\n q = \"SELECT (count(distinct ?c) as ?num) \" \\\n \"WHERE {\" \\\n \" ?c a <http://example.org/country> . \" \\\n \"}\"\n gra = rdflib.Graph()\n gra.parse(\"ontology.nt\", format=\"nt\")\n rows = gra.query(q)\n return list(rows)[0][0]\n\n\ndef how_many_pm():\n q = \"SELECT (count(distinct ?p) as ?num) \" \\\n \"WHERE {\" \\\n \" ?c <http://example.org/prime_minister> ?p . \" \\\n \"}\"\n gra = rdflib.Graph()\n gra.parse(\"ontology.nt\", format=\"nt\")\n rows = gra.query(q)\n return list(rows)[0][0]\n\n\ndef how_many_republic():\n q = \"SELECT (count(distinct ?c) as ?num) \" \\\n \"WHERE {\" \\\n \" ?c <http://example.org/government> ?p . \" \\\n \"FILTER regex(str(?p), 'republic')\" \\\n \"}\"\n gra = rdflib.Graph()\n gra.parse(\"ontology.nt\", format=\"nt\")\n rows = gra.query(q)\n return list(rows)[0][0]\n\n\ndef how_many_monarchy():\n q = \"SELECT (count(distinct ?c) as ?num) \" \\\n \"WHERE {\" \\\n \" ?c <http://example.org/government> ?p . 
\" \\\n \"FILTER regex(str(?p), 'monarchy')\" \\\n \"}\"\n gra = rdflib.Graph()\n gra.parse(\"ontology.nt\", format=\"nt\")\n rows = gra.query(q)\n return list(rows)[0][0]\n\n\ndef get_query_answer(query):\n gra = rdflib.Graph()\n gra.parse(\"ontology.nt\", format=\"nt\")\n res = gra.query(query)\n return list(res)\n\n\ndef add_to_ontology(part1, part2, part3, is_date=False, is_string=False):\n part1 = part1.rstrip()\n part3 = part3.rstrip()\n part1 = part1.replace(\" \", \"_\").replace(\"\\n\", \"\")\n part3 = part3.replace(\" \", \"_\").replace(\"\\n\", \"\")\n\n str1 = rdflib.URIRef('http://example.org/' + part1)\n str2 = rdflib.URIRef('http://example.org/' + part2)\n\n if is_date:\n str3 = Literal(part3, datatype=XSD.date)\n elif is_string:\n str3 = Literal(part3, datatype=XSD.string)\n else:\n str3 = rdflib.URIRef('http://example.org/' + part3)\n\n ontology.add((str1, str2, str3))\n\n\ndef add_type_to_ontology(part1, part3):\n part1 = part1.replace(\" \", \"_\").replace(\"\\n\", \"\")\n part3 = part3.replace(\" \", \"_\").replace(\"\\n\", \"\")\n\n str1 = rdflib.URIRef('http://example.org/' + part1)\n\n str3 = rdflib.URIRef('http://example.org/' + part3)\n\n ontology.add((str1, RDF.type, str3))\n\n\ndef get_country_info(url, name):\n res = requests.get(url)\n doc = lxml.html.fromstring(res.content)\n a = doc.xpath(\"//table[contains(@class, 'infobox')]\")\n if len(a) == 0:\n return\n\n # prime minister\n pm = a[0].xpath(\".//tr[th//text()[.='Prime Minister']]/td\")\n if len(pm) > 0:\n pm_name = pm[0].xpath(\".//text()\")[0]\n pm_link = pm[0].xpath(\".//a/@href\")\n add_to_ontology(name, \"prime_minister\", pm_name)\n if len(pm_link) > 0:\n get_person_info(wiki_prefix + pm_link[0], pm_name)\n\n # president\n president = a[0].xpath(\".//tr[th//text()[.='President']]/td\")\n\n if len(president) > 0:\n president_name = president[0].xpath(\".//text()\")[0]\n president_link = president[0].xpath(\".//a/@href\")[0]\n add_to_ontology(name, \"president\", 
president_name)\n if len(president_link) > 0:\n get_person_info(wiki_prefix + president_link, president_name)\n\n # government\n government = a[0].xpath(\".//tr[th//text()[contains(., 'Government')]]/td//text()[not(ancestor::sup)]\")\n if len(government) > 0:\n gov = ' '.join([g.rstrip() for g in government if len(g.rstrip()) > 0])\n gov = gov.split(\"( de jure )\")[0]\n gov = gov.split(\"(de jure)\")[0]\n add_to_ontology(name, \"government\", gov)\n\n # capital city\n capital = a[0].xpath(\".//th[contains(text(), 'Capital')]/../td//text()\")\n if len(capital) > 0:\n add_to_ontology(name, \"capital\", capital[0])\n\n # area\n area = a[0].xpath(\".//th[contains(a/text(), 'Area')]/../following-sibling::tr/td/text()\")\n if len(area) > 0:\n add_to_ontology(name, \"area\", area[0].replace('\\u00A0', '') + '2', False, True)\n\n # population\n population = a[0].xpath(\".//th[contains(a/text(), 'Population')]/../following-sibling::tr/td/text()\")\n if len(population) > 0:\n add_to_ontology(name, \"population\", population[0].split(\" \")[0], False, True)\n\n\ndef get_person_info(url, name):\n res = requests.get(url)\n doc = lxml.html.fromstring(res.content)\n a = doc.xpath(\"//table[contains(@class, 'infobox')]\")\n if len(a) == 0:\n return\n\n dob = a[0].xpath(\".//td//span[@class='bday']//text()\")\n if len(dob) > 0:\n add_to_ontology(name, \"birth_date\", dob[0], True)\n else:\n dob = a[0].xpath(\".//th[contains(text(), 'Born')]/../td//text()\")\n if len(dob) > 0:\n add_to_ontology(name, \"birth_date\", dob[0], True)\n\n\ndef get_all_countries(url):\n res = requests.get(url)\n document = lxml.html.fromstring(res.content)\n\n # countries table\n rows = document.xpath(\"//h2/span[contains(text(),'List')]/following::table//tr/td[2]/a\")\n\n # iterate over the countries\n for r in rows:\n countries = r.xpath(\"./text()\")\n countries_links = r.xpath(\"./@href\")\n for i in range(len(countries)):\n add_type_to_ontology(countries[i], \"country\")\n 
get_country_info(wiki_prefix + countries_links[i], countries[i])\n\n\ndef build_ontology(file_name):\n f = open(file_name, 'a+')\n get_all_countries(\"https://en.wikipedia.org/wiki/List_of_countries_by_population_(United_Nations)\")\n ontology.serialize(file_name, format=\"nt\")\n f.close()\n\n\ndef parse_question(nl_question):\n # who/what is the ... of ...?\n q1 = \"SELECT ?e \" \\\n \"WHERE {{\" \\\n \" <http://example.org/{0}> <http://example.org/{1}> ?e .\" \\\n \"}}\"\n\n # who is...?\n q2 = \"SELECT ?e ?r \" \\\n \"WHERE {{\" \\\n \" ?e ?r <http://example.org/{0}> .\" \\\n \"}}\"\n\n # when was the... of ... born?\n q3 = \"SELECT ?date \" \\\n \"WHERE {{\" \\\n \" <http://example.org/{0}> <http://example.org/{1}> ?e .\" \\\n \" ?e <http://example.org/birth_date> ?date\" \\\n \"}}\"\n\n if nl_question.startswith(\"Who is the\"):\n relation, entity = re.findall(r\"Who is the (.*?) of (.*?)\\?\", nl_question)[0]\n query = q1.format(entity.replace(' ', '_'), relation.lower().replace(' ', '_'))\n\n elif nl_question.startswith(\"Who\"):\n entity = re.findall(r\"Who is (.*?)\\?\", nl_question)[0]\n query = q2.format(entity.replace(' ', '_'))\n\n elif nl_question.startswith(\"What\"):\n relation, entity = re.findall(r\"What is the (.*?) of (.*?)\\?\", nl_question)[0]\n query = q1.format(entity.replace(' ', '_'), relation.lower().replace(' ', '_'))\n\n elif nl_question.startswith(\"When\"):\n relation, entity = re.findall(r\"When was the (.*?) of (.*?) 
born\\?\", nl_question)[0]\n query = q3.format(entity.replace(' ', '_'), relation.lower().replace(' ', '_'))\n\n else:\n print(\"wrong question\")\n sys.exit(-1)\n\n res = get_query_answer(query)\n if len(res) > 0:\n if nl_question.startswith(\"Who is\") and not nl_question.startswith(\"Who is the\"):\n print \"{0} of\".format(res[0][1].replace(\"http://example.org/\", \"\").replace(\"_\", \" \").capitalize()),\n countries = [r[0].replace(\"http://example.org/\", \"\").replace(\"_\", \" \") for r in res]\n print \", \".join(countries)\n\n else:\n print res[0][0].replace(\"http://example.org/\", \"\").replace(\"_\", \" \")\n return res\n\n\nif __name__ == '__main__':\n if sys.argv[1] == \"create\":\n build_ontology(sys.argv[2])\n elif sys.argv[1] == \"question\":\n q = \" \".join(sys.argv[2:])\n parse_question(q)\n"
}
] | 2 |
ravisankaradepu/MiscScripts | https://github.com/ravisankaradepu/MiscScripts | 018d0c60a5bc7d3bcfbd9ca729f99f76a61bbb2a | 2c7a2fa3d3d597c7f86fc4dd05adc251e0ef1c9a | b90d8815ac9ee88268afff47dfe06b898c5c1173 | refs/heads/master | 2021-01-11T02:30:12.004794 | 2016-10-15T04:33:17 | 2016-10-15T04:33:17 | 70,958,852 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7358871102333069,
"alphanum_fraction": 0.7681451439857483,
"avg_line_length": 54.11111068725586,
"blob_id": "4f737510f5941b16a9f75a48610643b9f2def073",
"content_id": "49d0823603a7a90aaa0d9ed461d5af882aab3c86",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 496,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 9,
"path": "/compareWeights.py",
"repo_name": "ravisankaradepu/MiscScripts",
"src_encoding": "UTF-8",
"text": "// Python code to compare weights from two different CNN's, to check the CNN's when trained with same random seed\n// net1 should be initialized to 0 else it goes into infinite loop\nimport caffe\nnet1 = caffe.Net('examples/mnist/lenet_train_test.prototxt','examples/mnist/lenet1_iter_5.caffemodel',caffe.TEST)\na=net1.params['conv1'][0].data\nnet1=0\nnet1 = caffe.Net('examples/mnist/lenet_train_test.prototxt','examples/mnist/lenet2_iter_5.caffemodel',caffe.TEST)\nb=net1.params['conv1'][0].data\na==b\n"
}
] | 1 |
JilaniMokrani/NCQtestTechnique | https://github.com/JilaniMokrani/NCQtestTechnique | 946c6a2ba33e92e5ece04483c0e75225ac1b3e9a | 6280cb58c315982726acb302e541415f530bcc4f | 0e05eecace5085ec5309cfad7ce7910c4ffded12 | refs/heads/main | 2023-02-08T14:17:25.392537 | 2021-01-05T01:26:06 | 2021-01-05T01:26:06 | 326,852,267 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5130568146705627,
"alphanum_fraction": 0.5499231815338135,
"avg_line_length": 21.482759475708008,
"blob_id": "4e2ec26ea788e5cac6659fe0e540505f4080965d",
"content_id": "2f45e911d48c4266762ea2c6e47c925af1dbae5c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 651,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 29,
"path": "/Algo2.py",
"repo_name": "JilaniMokrani/NCQtestTechnique",
"src_encoding": "UTF-8",
"text": "def CountNumberOfWays(nbRungs, Modulo):\n\n print(nbRungs)\n\n if nbRungs == 2:\n return 2 % Modulo #either 1,1 or 2\n elif nbRungs == 1:\n return 1\n elif nbRungs == 0:\n return 0\n else: \n return (CountNumberOfWays(nbRungs - 1, Modulo) + CountNumberOfWays(nbRungs - 2, Modulo)) % Modulo\n\ndef Solution(A, B):\n \n L = A.length\n result = []\n\n for i in range(L):\n if not B[i] == 0:\n result.append(CountNumberOfWays(A[i],B[i]))\n else:\n result.append(0)\n print(\"B[\" + str(i) + \"] = 0\")\n\n print(result)\n return result\n\nprint(CountNumberOfWays(40,10000000))"
},
{
"alpha_fraction": 0.4118673503398895,
"alphanum_fraction": 0.4781849980354309,
"avg_line_length": 25.090909957885742,
"blob_id": "02b4319ed8fd2a11853000d3f9d9474692a2e3d0",
"content_id": "83b1cf98add359e84a4253a4a5e878efc309f8ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 573,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 22,
"path": "/Algo1.py",
"repo_name": "JilaniMokrani/NCQtestTechnique",
"src_encoding": "UTF-8",
"text": "def Solution (N,A):\n \n if 1 <= N <= 100000:\n print(\"N must be between 1 and 100 000.\")\n return 0\n elif 1 <= A.length <= 100000:\n print(\"A has too many elements must not exced 100 000 elements.\")\n return 0\n\n result = [0] * N\n\n for i in A:\n if 1 <= i <= N:\n result[ i-1 ] += 1\n elif i == N + 1:\n result = [ max(result) ] * N\n else:\n print(\"Operation number \" + str(i+1) + \"is wrong. \\nIt must be between 1 and N+1.\")\n return 0\n \n print(result)\n return result"
},
{
"alpha_fraction": 0.3835616409778595,
"alphanum_fraction": 0.40753424167633057,
"avg_line_length": 16.205883026123047,
"blob_id": "5f99100779613191ede38916c732a7e1acbcb544",
"content_id": "56c7eed86245bc5e9ef5e1c82667b238a4e4da33",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 584,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 34,
"path": "/Algo3.py",
"repo_name": "JilaniMokrani/NCQtestTechnique",
"src_encoding": "UTF-8",
"text": "def Val(A,S):\n result = 0\n for i in range(A.length):\n result += A[i]*S[i]\n return abs(result)\n\ndef Solution(A):\n listOfS = [[1],[-1]]\n a= 1\n\n while a <= A.length:\n listOfS1 = listOfS\n\n for S in listOfS:\n S += [1]\n for S in listOfS1:\n S += [-1]\n \n listOfS += listOfS1\n print(listOfS)\n a+=1\n \n min = -1\n\n for S in listOfS:\n r = Val(A,S)\n if r == 0:\n print(0)\n return 0\n elif r < min:\n min = r\n \n print(min)\n return min"
}
] | 3 |
SergeySukharev/pytest_api | https://github.com/SergeySukharev/pytest_api | f869fa6dbba4a96ccfd8929d6df8a124a2b3a59f | b4f9cdf40ce40965865b6da05e5851553d38c29d | abed91e54947fa162c1f470d9fd64bc0e29655e6 | refs/heads/main | 2023-01-04T10:13:50.107736 | 2020-10-26T07:47:45 | 2020-10-26T07:47:45 | 306,417,770 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5736167430877686,
"alphanum_fraction": 0.5920600295066833,
"avg_line_length": 28.348623275756836,
"blob_id": "501c22ad7c1d568287763ca4757c7d557f9eafa0",
"content_id": "0e1b7d594cffeac0312f80b823296dd3f4dbb8ea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3199,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 109,
"path": "/test_api.py",
"repo_name": "SergeySukharev/pytest_api",
"src_encoding": "UTF-8",
"text": "import pytest\n\nPOSTS_MAX = 200\n\n\[email protected]('post_id', [1, POSTS_MAX])\ndef test_get_positive(session, base_url, post_id):\n res = session.get(url=f'{base_url}/{post_id}')\n\n assert res.status_code == 200\n assert res.json()['id'] == post_id\n\n\[email protected]('post_id', [-1, 0, POSTS_MAX + 1])\ndef test_get_negative(session, base_url, post_id):\n res = session.get(url=f'{base_url}/{post_id}')\n\n assert res.status_code == 404\n assert res.json() == {}\n\n\ndef test_get_all(session, base_url):\n res = session.get(url=f'{base_url}')\n\n assert len(res.json()) == POSTS_MAX\n for elem in res.json():\n for key in elem.keys():\n assert key in ['userId', 'id', 'title', 'completed']\n\n\ndef test_post(session, base_url):\n title = 'foo'\n body = 'bar'\n payload = {'title': title, 'body': body, 'userId': 1}\n res = session.post(url=base_url, json=payload)\n\n assert res.status_code == 201\n j = res.json()\n assert j['id'] == POSTS_MAX + 1\n assert j['userId'] == 1\n assert j['title'] == title\n assert j['body'] == body\n\n\ndef test_put_positive(session, base_url):\n payload = {'title': 'foo', 'body': 'bar', 'id': 1, 'userId': 1, 'completed': True}\n res = session.put(url=f'{base_url}/{payload[\"id\"]}', json=payload)\n\n assert res.status_code == 200\n res_json = res.json()\n assert res_json['title'] == payload['title']\n assert res_json['body'] == payload['body']\n assert res_json['id'] == payload['id']\n assert res_json['completed'] == payload['completed']\n\n\[email protected]('post_id, data',\n [(1, -1), (201, {'title': 'foo', 'body': 'bar', 'id': 1, 'userId': 1, 'completed': True})])\ndef test_put_negative(session, base_url, post_id, data):\n res = session.put(url=f'{base_url}/{post_id}', json=data)\n\n assert res.status_code == 500\n\n\[email protected]('field, value', [\n (\"userId\", 88),\n (\"id\", 66),\n (\"title\", \"test title\"),\n (\"completed\", True)\n])\ndef test_patch(session, base_url, field, value):\n res = 
session.patch(url=f'{base_url}/1', json={field: value})\n\n assert res.status_code == 200\n assert res.json()[field] == value\n\n\ndef test_delete(session, base_url):\n res = session.delete(url=f'{base_url}/1')\n\n assert res.status_code == 200\n assert not res.json()\n\n\[email protected]('field, value', [\n (\"userId\", 9),\n (\"id\", 4),\n (\"title\", 'vel non beatae est'),\n (\"completed\", False),\n (\"completed\", True)\n])\ndef test_filter_positive(session, base_url, field, value):\n str_val = str(value).lower() if isinstance(value, bool) else str(value)\n res = session.get(url=f'{base_url}', params={field: str_val})\n\n assert res.status_code == 200\n r_json = res.json()\n assert len(r_json) > 0\n for elem in r_json:\n assert elem[field] == value\n\n\[email protected]('params', [{\"userId\": 'User'}, {\"id\": 5676}, {\"title\": \"there is no such title\"},\n {\"completed\": 'i don know'}])\ndef test_filter_negative(session, base_url, params):\n res = session.get(url=f'{base_url}', params=params)\n\n assert res.status_code == 200\n assert res.json() == []\n"
},
{
"alpha_fraction": 0.6000000238418579,
"alphanum_fraction": 0.6000000238418579,
"avg_line_length": 12,
"blob_id": "df8c07520a7e2f8b69606479d6afe476b51cd550",
"content_id": "a1bb0829aba83f01e8a4ecfe8d4c7cb0f7b228c3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 15,
"license_type": "no_license",
"max_line_length": 12,
"num_lines": 1,
"path": "/README.md",
"repo_name": "SergeySukharev/pytest_api",
"src_encoding": "UTF-8",
"text": "# pytest_api\n \n"
}
] | 2 |
JakubSzczepanowski/Walk-animation-pygame | https://github.com/JakubSzczepanowski/Walk-animation-pygame | 8705ea9edc0640e05338b1ded00f61ff5dbdc0fb | 14116fe7e117988efdbaeceb804695202e55c017 | 8b5d3b1456408e2bc2d1e9c2d1aa16eb582795fd | refs/heads/master | 2021-03-15T23:40:13.691350 | 2020-03-14T14:13:02 | 2020-03-14T14:13:02 | 247,288,228 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5148090124130249,
"alphanum_fraction": 0.5409197211265564,
"avg_line_length": 26.010526657104492,
"blob_id": "24ad285d41712736ef8e566f10f3530c4632b58c",
"content_id": "2486af861bc79ad3f0061debe0d73faf7f7edf44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2566,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 95,
"path": "/walk.py",
"repo_name": "JakubSzczepanowski/Walk-animation-pygame",
"src_encoding": "UTF-8",
"text": "import pygame\npygame.init()\n\nwidth,height = 1028,769\n\nscreen = pygame.display.set_mode((width,height))\npygame.display.set_caption('Walk')\nFPS = 24\nclock = pygame.time.Clock()\nground = pygame.image.load('ground.png')\nforward,backward = [],[]\nfor i in range(8):\n forward.append(pygame.image.load(f'sheet/sprite_{i}.png'))\n backward.append(pygame.image.load(f'sheet/sprite_{i+8}.png'))\n\nf_index,b_index = 0,0\n\nclass Hero:\n def __init__(self,x,y,v,s):\n self.x = x\n self.y = y\n self.v = v\n self.s = s\n self.jump = False\n self.turn = 'r'\n self.width = 108\n self.height = 140\n\nclass Blast:\n def __init__(self,x,y,radius,color,facing):\n self.x = x\n self.y = y\n self.radius = radius\n self.color = color\n self.facing = facing\n self.v = 8 * facing\n\n def draw(self,screen):\n pygame.draw.circle(screen,self.color,(self.x,self.y),self.radius)\n\nplayer = Hero(20,395,0,10)\nbullets = []\nwhile True:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n quit()\n screen.blit(ground,(0,0))\n keys = pygame.key.get_pressed()\n if keys[pygame.K_RIGHT] != 0:\n player.turn = 'r'\n screen.blit(forward[f_index%8],(player.x+player.v,player.y))\n f_index += 1\n player.v += 15\n elif keys[pygame.K_LEFT] != 0:\n player.turn = 'l'\n screen.blit(backward[b_index%8],(player.x+player.v,player.y))\n b_index += 1\n player.v -= 15\n else:\n if player.turn == 'r':\n screen.blit(forward[0],(player.x+player.v,player.y))\n else:\n screen.blit(backward[3],(player.x+player.v,player.y))\n \n if not player.jump:\n if keys[pygame.K_UP] != 0:\n player.jump = True\n else:\n if player.s >= -10:\n player.y -= round((player.s * abs(player.s)) * 0.5)\n player.s -= 1\n else:\n player.s = 10\n player.jump = False\n\n if keys[pygame.K_SPACE] != 0:\n if player.turn == 'r':\n facing = 1\n else:\n facing = -1\n\n if len(bullets) < 5:\n bullets.append(Blast(round(player.x+player.v+player.width/2),round(player.y+player.height/2),6,(169,169,169),facing))\n\n 
for bullet in bullets:\n if bullet.x < width and bullet.x > 0:\n bullet.x += bullet.v\n bullet.draw(screen)\n else:\n bullets.remove(bullet)\n \n \n pygame.display.update()\n clock.tick(FPS)\n"
},
{
"alpha_fraction": 0.6634615659713745,
"alphanum_fraction": 0.7596153616905212,
"avg_line_length": 27.363636016845703,
"blob_id": "c36543c584a63d56120891ab1c52967a027487a1",
"content_id": "060323b27134c105bdcd899b453873d4af454a25",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 312,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 11,
"path": "/sheet/rename.sh",
"repo_name": "JakubSzczepanowski/Walk-animation-pygame",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nmv sprite_00.png sprite_0.png\nmv sprite_01.png sprite_1.png\nmv sprite_02.png sprite_2.png\nmv sprite_03.png sprite_3.png\nmv sprite_04.png sprite_4.png\nmv sprite_05.png sprite_5.png\nmv sprite_06.png sprite_6.png\nmv sprite_07.png sprite_7.png\nmv sprite_08.png sprite_8.png\nmv sprite_09.png sprite_9.png\n"
},
{
"alpha_fraction": 0.8313252925872803,
"alphanum_fraction": 0.8313252925872803,
"avg_line_length": 40.5,
"blob_id": "c37b40f0d6d1b398ea2f05e9c20b0459abbc08c2",
"content_id": "ebb3cc8563a69e841d2e16f529f2b402d7f4e5ea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 83,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 2,
"path": "/README.md",
"repo_name": "JakubSzczepanowski/Walk-animation-pygame",
"src_encoding": "UTF-8",
"text": "# Walk-animation-pygame\nWalk animation using sprite sheet written in pygame module\n"
}
] | 3 |
SCengiz/openCV | https://github.com/SCengiz/openCV | 057b60ff9fa1409cf3743cc77e2e966e9e2128b8 | 9740fd0f72901e1673bfd829561efe707995a6ed | dfb9faad7ba737dffbbf8850d76c9948773dec0c | refs/heads/main | 2023-03-09T07:09:10.892662 | 2021-02-23T13:13:27 | 2021-02-23T13:13:27 | 339,727,922 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6166484355926514,
"alphanum_fraction": 0.6370208263397217,
"avg_line_length": 35.22222137451172,
"blob_id": "677aaae60f49f87e153f5d7bb01e788f6a24166f",
"content_id": "0ab8fba7ebc92c2873e609b9a7edc38cc0778f6d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4631,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 126,
"path": "/airplane_Engines/codes/airplane_driver.py",
"repo_name": "SCengiz/openCV",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\n# SORAY CENGİZ ELM 463 PROJECT\n# 03.01.2021 17:40\n# Kodu çalıştırabilmek ilgili modüllerin yüklü olduğundan emin olunuz\n\nimport plotly.express as px\nimport plotly.graph_objects as go\nfrom skimage import data, filters, measure, morphology\nimport sys\n\nimport numpy as np\nimport cv2 as cv\nimport matplotlib.pyplot as plt\n\nimport math\n\nimport airplane_module as am\n\n# MAIN \n\n# Giriş görüntüsü algoritmaya çağırılmaktadır.\nimage_input = cv.imread(\"input_1.pgm\", 0) \nimage_new_size = cv.resize(image_input, (1280,800), interpolation = cv.INTER_LINEAR)\n\n# Adative CLAHE uygulanmaktadır.\nadaptive_hist_obj = cv.createCLAHE(clipLimit=5.0, tileGridSize=(12,12))\nhistogram_out = adaptive_hist_obj.apply(image_new_size)\n\n#show_hist('histogram_of_adaptive_histogram', histogram_out)\n\n# Görüntü yumuşatılmıştır.\nimage_blur = cv.GaussianBlur(histogram_out, (5,5), 0)\nimage_to_loop = image_blur\n\n# Canny uygulanarak kenarlar tespit edilmiştir.\nimage_edges = cv.Canny(image_blur, 100, 140)\n\n# Hough Transform uygulanarak çıkış gözlemlenmiştir.\nmarked_circles = \\\n cv.HoughCircles(image_edges, cv.HOUGH_GRADIENT, 1 , 80, param1 = 80, param2 = 20, minRadius = 15, maxRadius = 80)\nmarked_circles_round = np.uint16(np.around(marked_circles))\n\n# Thresholded görüntü elde edilmiştir.\noutput_of_thresholded_image = am.output_of_thresholded_marked_image(image_to_loop, marked_circles_round)\noutput_inverse_of_thresholded_image = cv.bitwise_not(output_of_thresholded_image)\n\n# Görüntüye sırası ile Erosion ardından Dilation uygulanmıştır.\nkernel = cv.getStructuringElement(cv.MORPH_ELLIPSE,(3,3))\n\noutput_of_erosion_image = cv.erode(output_inverse_of_thresholded_image, kernel, iterations = 1)\noutput_of_dilation_image = cv.dilate(output_of_erosion_image, kernel, iterations = 1)\n\nbefore_connected_components_image = output_of_dilation_image\n\n# Etiket grupları ve etiketlenmiş görüntünün sonucunun görebiliriz.\nlabel_groups = 
am.connected_component_label(before_connected_components_image)[0]\noutput_of_labeled_image = am.connected_component_label(before_connected_components_image)[1]\n\n# Figure işlemleri.\ncontour = None\ny = ()\nx = ()\n\nfig = px.imshow(image_new_size, binary_string=True)\nfig.update_traces(hoverinfo='skip')\n\nprops = measure.regionprops(label_groups, before_connected_components_image)\nproperties_1 = ['eccentricity']\nproperties_2 = ['area']\nproperties_3 = ['major_axis_length']\nproperties_4 = ['minor_axis_length']\nall_properties = ['eccentricity', 'area', 'major_axis_length', 'minor_axis_length']\n\n# Condition'lardan geçen etiketlerin özelliklerini kaydedebilmek için listeler oluşturuldu.\nlist_ecc = []\nlist_area = []\nlist_major = []\nlist_minor = []\n\n# list_of_label[] hangi etiketlerde doğru şartlar sağlandı sorusunun cevabını tutmaktadır.\nmotor_counter = 0\nlist_of_label = []\n\nfor index in range(1, label_groups.max()):\n hoverinfo = ''\n label = props[index].label\n for prop_name_1 in properties_1:\n holder_ecc = getattr(props[index], prop_name_1)\n \n if holder_ecc < 0.70:\n list_ecc.append(holder_ecc)\n \n for prop_name_2 in properties_2:\n holder_area = getattr(props[index], prop_name_2)\n \n if holder_area > 600:\n list_area.append(holder_area)\n \n for prop_name_3, prop_name_4 in zip(properties_3,properties_4):\n holder_major = getattr(props[index], prop_name_3)\n holder_minor = getattr(props[index], prop_name_4)\n list_major.append(holder_major)\n list_minor.append(holder_minor)\n \n if holder_major - holder_minor < 20:\n contour = measure.find_contours(label_groups == label, 0.5)[0]\n y, x = contour.T\n motor_counter += 1\n hoverinfo = ''\n list_of_label.append(index)\n for prop_names in all_properties:\n hoverinfo += f'<b>{prop_names}: {getattr(props[index], prop_names):.2f}</b><br>'\n else:\n pass\n else:\n pass \n else:\n pass\n \n fig.add_trace(go.Scatter(\n x=x, y=y, name=label,\n mode='lines', fill='toself', showlegend=False,\n 
hovertemplate=hoverinfo, hoveron='points+fills'))\n \nfig.show()\n\n"
},
{
"alpha_fraction": 0.6081499457359314,
"alphanum_fraction": 0.6353163719177246,
"avg_line_length": 39.10344696044922,
"blob_id": "49a9c163ec4c9644eec3867c47953907ea78eded",
"content_id": "c42ec52e6e3de872185ae6f6e79fea833b692e51",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5900,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 145,
"path": "/airplane_Engines/codes/airplane_module.py",
"repo_name": "SCengiz/openCV",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\n# SORAY CENGİZ ELM 463 PROJECT\n# 03.01.2021 17:40\n# Kodu çalıştırabilmek ilgili modüllerin yüklü olduğundan emin olunuz\n\nimport plotly.express as px\nimport plotly.graph_objects as go\nfrom skimage import data, filters, measure, morphology\nimport sys\n\nimport numpy as np\nimport cv2 as cv\nimport matplotlib.pyplot as plt\n\nimport math\n\n\ndef connected_component_label(i_image):\n # Birbiri ile bağlantılı objeleri bulmak için kullanılan fonksiyondur.\n tmp_image = i_image\n number_of_labels, all_labels = cv.connectedComponents(tmp_image)\n\n label_hue = np.uint8(179*all_labels/np.max(all_labels))\n blank_ch = 255*np.ones_like(label_hue)\n labeled_img = cv.merge([label_hue, blank_ch, blank_ch])\n labeled_img[label_hue == 0] = 0\n \n labeled_img = cv.cvtColor(labeled_img, cv.COLOR_HSV2RGB)\n \n return all_labels,labeled_img\n\ndef image_out_w(title, i_image):\n \n cv.imshow(str(title), i_image)\n cv.waitKey(0)\n cv.destroyAllWindows()\n \n\ndef show_hist(title, i_image):\n # istenilen görüntünün histogramını gösteren ve ona bir başlık atayan fonksiyon\n plt.figure()\n plt.title(str(title))\n plt.hist(i_image.ravel(),256,[0,256])\n plt.show()\n\ndef circle_coordinates(marked_circles_parameters):\n # Hough Transform sonucu elde edilen parametrelerin işlenip derli toplu gösterilmesi\n print(' *** ALL MARKED CIRCLE COORDINATES *** ')\n \n size = marked_circles_parameters.shape[1]\n print('Number of marked circle : ', size)\n \n \"\"\"\n Bulunan circle'ın merkezinin konumu [c1, c2] şeklinde ifade edilmiştir.\n Bulunan circle'ın çapı ise circle_diameter olarak ifade edilmiştir. 
\n \n \"\"\"\n \n for item in range(size):\n ccols = marked_circles_parameters[0][item][0]\n crows = marked_circles_parameters[0][item][1]\n circle_diameter = marked_circles_parameters[0][item][2]\n \n print(item + 1, '.circle -> ',\n 'location : ' ,'[', ccols, crows, ']',' && ', 'diameter = ', circle_diameter)\n\n\ndef show_marked_circles(i_image, marked_circles_parameters):\n # Hough transform sonucunda bulunan muhtemel motorların bulunduğu yuvarlaklar\n # ... topluluğunun gösterimi.\n # ... Hough circle' sonucu elde edilen parametreler burada kullanılıp girdi görüntüsünde\n # ... ilgili yere yuvarlak çizmesini sağlayan bir fonksiyon yazıldı.\n # ... çizilen yuvarlaklar sadece görsel olarak görülmesi açısından beyaz seçildi.\n tmp_image = i_image\n counter = 1\n size = marked_circles_parameters.shape[1]\n \n for item in range(size):\n ccols = marked_circles_parameters[0][item][0]\n crows = marked_circles_parameters[0][item][1]\n circle_diameter = marked_circles_parameters[0][item][2]\n \n cv.circle(tmp_image, (ccols, crows), circle_diameter, (255,255,255), 5)\n cv.putText(tmp_image, str(counter) + '.circle', ((ccols - 50) , (crows + 20)), cv.FONT_HERSHEY_SIMPLEX,\n 1.1 , (255, 0, 0), 2)\n counter += 1\n\n image_out_ww('Founded circles with Hough Transform : ', tmp_image)\n \n\ndef output_of_thresholded_marked_image(i_image, marked_circles_parameters):\n # Hough circle sonucunda bulunan her bir circle'ın girdi görüntüsündeki konumuna \n # ... +40 satıra ve +40 sütuna ekleme yaparak her bir circle x ve y eksenleri 40 piksel genişletildi\n # ... genişletilmesinin sebebi tam yuvarlak dışında da motorun kalabilme ihtimalidir.\n # ... part_row ve part_col 2*diameter + 40 olarak belirlenmesinin sebebide budur olabildiğince ekstra \n # ... alana da baklımalıdır çünkü hough transform tam olarak motor çeperini yuvarlak içine alamamış olabilir.\n # ... 
Raporda daha detaylı görsel odaklı bir anlatımda yapılmıştır.\n \n rows, cols = i_image.shape\n temporary_image = np.zeros(shape = [rows,cols], dtype = np.uint8) \n \n size = marked_circles_parameters.shape[1]\n \n for item in range(size):\n ccols = marked_circles_parameters[0][item][0]\n crows = marked_circles_parameters[0][item][1]\n circle_diameter = marked_circles_parameters[0][item][2]\n \n part_row = 2*circle_diameter + 40\n part_col = 2*circle_diameter + 40\n \n part_image = np.zeros(shape = [part_row, part_col], dtype = np.uint8) \n \n for i in range(crows - circle_diameter - 20, crows + circle_diameter + 20):\n for j in range(ccols - circle_diameter - 20, ccols + circle_diameter + 20):\n if j > 1280 - circle_diameter or i > 800 - circle_diameter:\n pass\n else:\n temporary_image[i, j] = i_image[i, j] \n \n \n for i_part, i in zip(range(0, part_row), range(crows - circle_diameter - 20, crows + circle_diameter + 20)):\n for j_part, j in zip(range(0, part_col) ,range(ccols- circle_diameter - 20, ccols + circle_diameter + 20)):\n if j > 1280 - circle_diameter or i > 800 - circle_diameter:\n pass\n else:\n part_image[i_part, j_part] = temporary_image[i, j]\n \n \n Threshold_value = np.amax(part_image)\n Threshold_value = Threshold_value * 20\n Threshold_value = Threshold_value / 100\n\n part_image[part_image < Threshold_value] = 0 \n part_image[part_image >= Threshold_value] = 255 \n \n for i_part, i in zip(range(0, part_row), range(crows - circle_diameter - 20, crows + circle_diameter + 20)):\n for j_part, j in zip(range(0, part_col) ,range(ccols- circle_diameter - 20, ccols + circle_diameter + 20)):\n if j > 1280 - circle_diameter or i > 800 - circle_diameter:\n pass\n else:\n temporary_image[i, j] = part_image[i_part, j_part]\n \n return temporary_image\n\n"
}
] | 2 |
matheus-lima/control-unit | https://github.com/matheus-lima/control-unit | df790f7963e623a6f822756e7112e9bd168614a4 | 376a93a8fdfbf5793f5f27b8bbc19796bafba809 | 36fe3c2fd8290edd23b682c3b2a0cb767d919568 | refs/heads/master | 2021-01-20T07:57:23.358409 | 2017-05-08T15:00:41 | 2017-05-08T15:00:41 | 90,073,104 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7777777910232544,
"alphanum_fraction": 0.7777777910232544,
"avg_line_length": 30.5,
"blob_id": "36d5bff57d6c17f8eccb3d8c622b59f30cf8846e",
"content_id": "3381ec0bf4454beaa9aa9c1a7ac059aa0f695f70",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 63,
"license_type": "permissive",
"max_line_length": 47,
"num_lines": 2,
"path": "/README.md",
"repo_name": "matheus-lima/control-unit",
"src_encoding": "UTF-8",
"text": "# Control Unit\nA simple simulator of a control unit in python.\n"
},
{
"alpha_fraction": 0.4772341549396515,
"alphanum_fraction": 0.4923642575740814,
"avg_line_length": 34.828125,
"blob_id": "496f82dfaba95a7bc42bbb47ee2b2cec2aa6fc6c",
"content_id": "0ed102ac3c073250aec4049d5db49d611446c8e2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7124,
"license_type": "permissive",
"max_line_length": 141,
"num_lines": 192,
"path": "/src/uc.py",
"repo_name": "matheus-lima/control-unit",
"src_encoding": "UTF-8",
"text": " #!/usr/bin/python\r\n #-*- coding: utf-8-*-\r\n\r\n############################\r\n# Control Unit Simulator\r\n# Duo: Matheus Lima (github.com/matheus-lima)\r\n# Vinicius Matheus (github.com/vnicius)\r\n############################\r\n\r\nimport re\r\n\r\nclass ControlUnit():\r\n def __init__(self, data_mem, instr_mem):\r\n self.data_mem = data_mem #Memória de dados\r\n self.instr_mem = instr_mem #Memória de instruções\r\n self.reg = [0] * 5 #Array de registradores\r\n self.ir = \"\" #Armazena a instrução atual\r\n self.pc = 0 #Armazena o endereço da próxima instrução\r\n self.flags = [False] * 4 #Flags condicionais [zero, menor que, maior que, igual]\r\n # zero = 1000 , maior que = 0010, igual que = 0001 ...\r\n\r\n def fill_instr_mem(self, instr):\r\n self.instr_mem.append(instr)\r\n\r\n def fetch(self):\r\n self.ir = self.instr_mem[self.pc] #busca na memoria de instrucao e atualiza IR\r\n self.pc += 1 #atualiza pc\r\n\r\n def decode_exec(self):\r\n instr = self.ir.split(\" \") #Divide a instrução\r\n op = instr[0].upper() #Pega a operação da instrução\r\n data = ''.join(instr[1:]).replace(\" \", '')\r\n #Avalia qual tipo de operacao\r\n if (op == \"ADD\") or (op == \"SUB\") or (op == \"DIV\") or (op == \"MULT\"): #Casos seja uma operação aritmética\r\n self.arithmetic(op, data)\r\n elif op == \"STORE\":\r\n self.store(data)\r\n elif op == \"LOAD\":\r\n self.load(data)\r\n elif op == \"MOV\":\r\n self.mov(data)\r\n elif op == \"JMP\":\r\n self.jmp(data)\r\n elif op == \"CMP\":\r\n self.cmp(data)\r\n elif op == \"JL\" or op == \"JG\" or op == \"JE\" or op == \"JZ\":\r\n self.conditional_jump(op, data)\r\n\r\n def arithmetic(self, operation, data):\r\n #data: Transforma o array data em string e retira os espaços\r\n operands = data.split(\",\") #Separa os operandos\r\n op1 = op2 = 0 #Variaveis para guardar os dois operandos da operação\r\n\r\n reg_result = int(operands[0][1]) #Guarda o indíce do registrador de saída\r\n\r\n if \"R\" in 
operands[1].upper(): #Confere se é um registrador\r\n op1 = self.reg[int(operands[1][1])] #Pega apenas o número do registrador\r\n elif \"MD\" in operands[1].upper(): #Confere se é um acesso à memória de dados\r\n op1 = self.data_mem[int(re.sub(r'[^\\d]', '', operands[1].upper()))] #Pega apenas o endereço da memória\r\n else:\r\n op1 = int(operands[1]) #Caso seja um número\r\n\r\n if \"R\" in operands[2].upper():\r\n op2 = self.reg[int(operands[2][1])]\r\n elif \"MD\" in operands[2].upper():\r\n op2 = self.data_mem[int(re.sub(r'[^\\d]', '', operands[2].upper()))]\r\n else:\r\n op2 = int(operands[2])\r\n\r\n if operation == \"ADD\":\r\n self.reg[reg_result] = op1 + op2\r\n elif operation == \"SUB\":\r\n self.reg[reg_result] = op1 - op2\r\n elif operation == \"MULT\":\r\n self.reg[reg_result] = op1 * op2\r\n elif operation == \"DIV\":\r\n self.reg[reg_result] = int(op1 / op2)\r\n\r\n def store(self, data): # STORE MD[] R[] or MD[] X\r\n \"\"\"Recebe um endereço da memória e o dado que será armazenado nele\"\"\"\r\n operands = data.split(',')\r\n value = 0\r\n\r\n if \"R\" in operands[0][3:len(operands[0])-1]:\r\n adress_mem = self.reg[int(operands[0][4:len(operands[0])-1])]\r\n else:\r\n adress_mem = int(re.sub(r'[^\\d]', '', operands[0].upper())) #Endereço na memória\r\n\r\n if \"R\" in operands[1].upper(): #Confere se é um registrador\r\n value = self.reg[int(operands[1][1])]\r\n else:\r\n value = int(operands[1])\r\n\r\n self.data_mem[adress_mem] = value #Armazena o dado no endereço da memória\r\n\r\n\r\n def load(self, data): # lOAD R[] MD[]\r\n '''Armazena dados da memória de dados (ou valores) no registrador indicado'''\r\n operands = data.split(',')\r\n value = -1\r\n\r\n reg_result = int(operands[0][1])\r\n\r\n if \"MD\" in operands[1].upper(): #Confere se é um acesso à memória\r\n value = self.data_mem[int(re.sub(r'[^\\d]', '', operands[1].upper()))] #Pega apenas o endereço da memória\r\n\r\n self.reg[reg_result] = value\r\n\r\n def mov(self, data): #MOV R0, 
R1 or MOV R0, 5\r\n operands = data.split(',')\r\n value = -1\r\n\r\n reg_result = int(operands[0][1])\r\n\r\n if \"R\" in operands[1]:\r\n value = self.reg[int(operands[1][1])]\r\n else:\r\n value = int(operands[1])\r\n\r\n self.reg[reg_result] = value\r\n\r\n def jmp(self, data): # JMP MI[]\r\n\r\n if \"MI\" in data.upper(): #Confere se é um acesso à memória\r\n self.pc = int(re.sub(r'[^\\d]', '', data.upper())) #Pega apenas o endereço da memória\r\n else:\r\n #data é o rotulo\r\n for x in range(len(self.instr_mem)):\r\n if self.instr_mem[x].upper() == data.upper():\r\n self.pc = x+1\r\n break\r\n\r\n self.flags = [False] * 4 #reset flags\r\n\r\n def cmp(self, data): #CMP R[X] R[Y]\r\n #atualiza as flahs\r\n operands = data.split(',')\r\n value = -1\r\n\r\n if len(operands) == 1: #Compara se eh igual a zero\r\n value = self.reg[int(operands[0][1])]\r\n if value == 0:\r\n self.flags[0] = True\r\n #else ja eh falso\r\n else: #dois argumentos: R[X] <operation> R[Y]\r\n #recovery values from regs\r\n value = self.reg[int(operands[0][1])]\r\n value2 = self.reg[int(operands[1][1])]\r\n if value > value2:\r\n self.flags[2] = True\r\n elif value < value2:\r\n self.flags[1] = True\r\n elif value == value2:\r\n self.flags[3] = True\r\n\r\n def conditional_jump(self, op, data):\r\n if op == \"JZ\" and self.flags[0]:\r\n self.jmp(data)\r\n elif op == \"JL\" and self.flags[1]:\r\n self.jmp(data)\r\n elif op == \"JG\" and self.flags[2]:\r\n self.jmp(data)\r\n elif op == \"JE\" and self.flags[3]:\r\n self.jmp(data)\r\n\r\n\r\n def __str__(self):\r\n return (\"\\nIR: \"+self.ir+\"\\ndata_mem: \"+str(self.data_mem)+\"\\nReg: \"+str(self.reg)+\"\\nPC: \"+str(self.pc)+\"\\nFlags: \"+str(self.flags))\r\n\r\n#########################\r\nif __name__ == \"__main__\":\r\n data_mem = [0] * 16\r\n instr_mem = []\r\n\r\n name = \"../assets/fibonacci.txt\"\r\n #name = input(\"Programa: \")\r\n file = open(name, \"r\")\r\n\r\n uc = ControlUnit(data_mem, instr_mem)\r\n\r\n for line in 
file.readlines():\r\n if line is not \"\\s\":\r\n uc.fill_instr_mem(line.replace(\"\\n\", \"\"))\r\n file.close()\r\n\r\n tam = len(uc.instr_mem)\r\n\r\n while uc.pc < tam:\r\n uc.fetch()\r\n uc.decode_exec()\r\n print(uc)\r\n input()\r\n"
}
] | 2 |
AmirAli-N/vscode | https://github.com/AmirAli-N/vscode | 14f05fea8306a33ae53093abfb97a4530572bb4e | 888d14c983dbd47bf3d5649bd8f8df69ba6086f3 | 431b0168159844ca76472aa1f865761935a031c8 | refs/heads/master | 2020-07-09T20:23:21.558737 | 2019-08-23T21:59:01 | 2019-08-23T21:59:01 | 204,074,335 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.75390625,
"alphanum_fraction": 0.76171875,
"avg_line_length": 48.79999923706055,
"blob_id": "537c9dde61f87878dcb68da8995037985006626e",
"content_id": "d7d3cff3c73bdf1510669010d4665d162aea5440",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 256,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 5,
"path": "/iPython magic commands.py",
"repo_name": "AmirAli-N/vscode",
"src_encoding": "UTF-8",
"text": "#ipython magic commonds at\r\n#https://ipython.readthedocs.io/en/stable/interactive/magics.html?highlight=%25time#magic-time\r\n\r\n%%time #times the cpu time and wall time of the cell\r\n%time #times the cpu time and wall time of the first command in the cell\r\n\r\n"
},
{
"alpha_fraction": 0.7678391933441162,
"alphanum_fraction": 0.7738693356513977,
"avg_line_length": 87.81818389892578,
"blob_id": "acbe11ce53a373040b7bf46fbef6381ad95c09c3",
"content_id": "cc29990f041586581ceb1bebf806d81b25faaef1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 995,
"license_type": "no_license",
"max_line_length": 452,
"num_lines": 11,
"path": "/Hadoop HortonWorks.bash",
"repo_name": "AmirAli-N/vscode",
"src_encoding": "UTF-8",
"text": "#hadoop file system via horton works sandbox\r\n#the commands start with hadoop fs instead of hdfs dfs\r\n\tls -l #uses a long listed format to report\r\n\tls -a #shows all including hidden files\r\n\tls -la #combines both\r\n\thadoop fs -rmdir ml-100k #removes the ml-100k directory form hadoop file system\r\n\t\r\n\t\r\n\tsu root #Unix command allowing to run other commands with the previlidge of another user.\r\n\tpython RatingsBreakdown.py u.data #runs RatingsBreakdown script with u.data file locally, but uses mapreduce logic in mapping and reducing\r\n\tpython RatingsBreakdown.py -r hadoop --hadoop-streaming-jar /usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar u.data #runs the script on a hadoop cluster, -r hadoop tells MRJob that hadoop is used to run the job, --hadoop-streaming-jar /usr... secifies the java machine python streaming (This is specific to HortonWorks sandbox), u.data is the input file. In an acutal scenario, path of the data file in hadoop cluster should be specified.\r\n\t\r\n\t\r\n\t"
},
{
"alpha_fraction": 0.7275518178939819,
"alphanum_fraction": 0.7413660883903503,
"avg_line_length": 52.20833206176758,
"blob_id": "203ba324bbc918be225b316f5d57ade9b461a075",
"content_id": "68c278ace876d336ac884fa27e01218954e17a16",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1303,
"license_type": "no_license",
"max_line_length": 143,
"num_lines": 24,
"path": "/Cypress(Hadoop).bash",
"repo_name": "AmirAli-N/vscode",
"src_encoding": "UTF-8",
"text": "#configure jupyterhub to use cypress\r\n\t#in jupyter notebook, open a new terminal\r\n\t\tnano .jhubrc\r\n\t\t\tmodule add hdp\r\n\t\t\t#save -> y\r\n\t\t\t#write to .jhubrc\r\n\t\t#Click Control Panel\r\n\t\t#Stop my server\r\n\t\t#Start my server\r\n\t#Spawn a computing server with 1 node, 8 cores, 6gb per core, and 2:00:00 walltime\r\n\r\n#access hadoop file system\r\n\t#new terminal\r\n\t\tmodule add hdp\r\n\t\thdfs dfs -ls #show your hadoop home list directory\r\n\t\t\thdfs dfs -ls / #show directories from root directory\r\n\t\t\thdfs dfs -ls/user/snasrol #show directories specific to a user\r\n\t\thdfs dfs -put /user/snasrol/research/sex.csv /user/snasrol/teaching #copy file from research/sex to teaching folder\r\n\t\thdfs dfs -copyFromLocal /scratch2/snasrol/research/sex.csv /user/snasrol/teaching #copy from local file system to user file system\r\n\t\thdfs dfs -copyToLocal /user/snasrol/teachhing/sex.core /scratch2/snasrol/research/sex #copy from user file system to local file system\r\n\t\thdfs dfs -rm -r intro-to-spark #removes the intro-to-spark directory recursively\r\n\t\thdfs dfs -mkdir intro-to-spark #creates a new directory named intro-to-spark\r\n\t\thdfs dfs -cat /repository/gutenberg-shakespeare.txt \\\r\n\t\t\t2>/dev/null | head -n 20 #concatenates source path to stdout, 2>/dev/null ignores the error output, | head -n 20 displays the first 20 lines\r\n\t\t"
},
{
"alpha_fraction": 0.6406124234199524,
"alphanum_fraction": 0.653505265712738,
"avg_line_length": 36.84375,
"blob_id": "874921a999476e7633e0edb3ab99796b61d6cd3c",
"content_id": "09544ff86c2fccca79cfca938888ce8a94d83c46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1241,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 32,
"path": "/text_parsing 1.py",
"repo_name": "AmirAli-N/vscode",
"src_encoding": "UTF-8",
"text": "arr_str=[]\r\nfor i in input_lines:\r\n arr_str.append(i.split(' ')) # a list of list of words\r\nunlisted_arr=[val for sublist in arr for val in sublist] #flatten the list of list\r\ncharacters = list(map(chr, range(97,123))) #map produces a set of response for the range and function chr\r\nremove_non_words=[]\r\nnon_word=0\r\nfor i in unlisted_arr:\r\n for j in list(i):\r\n if j not in characters:\r\n non_word=1\r\n break\r\n if non_word==0:\r\n remove_non_words.append(i)\r\nnum_words=len(unlisted_arr)\r\noutput=str(num_words)+\"\\n\"+\"words\"+\"\\n\"\r\narr_str.sort()\r\nwords_num=[]\r\nremove_duplicate=list(dict.fromkeys(unlisted_arr)) #removing duplicates in a list\r\nfor i in remove_duplicate:\r\n words_num.append(unlisted_arr.count(i))\r\n output=output+i+\" \"+str(words_num[len(words_num)-1])+\"\\n\"\r\ncharacters = list(map(chr, range(97,123)))\r\nletters_num=[0]*len(characters) # a list of len(characters) of zeros\r\nfor i in unlisted_arr:\r\n for j in characters:\r\n if j in list(i):\r\n letters_num[characters.index(j)]=letters_num[characters.index(j)]+i.count(j)\r\noutput=output+\"letters\"+\"\\n\"\r\nfor i in range(0, len(letters_num)):\r\n output=output+characters[i]+\" \"+str(letters_num[i])+\"\\n\"\r\nprint(output)"
},
{
"alpha_fraction": 0.5573770403862,
"alphanum_fraction": 0.564828634262085,
"avg_line_length": 30.047618865966797,
"blob_id": "227128fa926f35eaf666c9d679a12d6d8a7d6340",
"content_id": "6ca8427ab2dfb262d923db5cd1b42119d2af3c3c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 671,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 21,
"path": "/top_5_ip.py",
"repo_name": "AmirAli-N/vscode",
"src_encoding": "UTF-8",
"text": "def high_avg(lines):\r\n line_lst=lines.split(\"\\n\")\r\n ip_lst=[]\r\n time_lst=[]\r\n for i in line_lst:\r\n temp_lst=i.split(\" \")\r\n ip_lst.append(temp_lst[0])\r\n time_lst.append(temp_lst[len(temp_lst)-1])\r\n \r\n unique_ip=set(ip_lst)\r\n avg_lst=[]\r\n for i in unique_ip:\r\n indices=[index for index, value in enumerate(unique_ip) if value == i]\r\n avg_lst.append(sum(int(time_lst[i]) for i in indices)/len(indices))\r\n \r\n top_5_lst=sorted(range(len(avg_lst)), key=lambda i: avg_lst[i], reverse=True)[:5]\r\n return_lst=[]\r\n for i in top_5_lst:\r\n return_lst.append(list(unique_ip)[i])\r\n \r\n return return_lst"
},
{
"alpha_fraction": 0.7288135886192322,
"alphanum_fraction": 0.7331875562667847,
"avg_line_length": 51.85293960571289,
"blob_id": "0e23183e124a1fbd33bbf164a8258612a03252e3",
"content_id": "6c688a0936c948ab555defdc329eea4ab479ba37",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1829,
"license_type": "no_license",
"max_line_length": 310,
"num_lines": 34,
"path": "/MapReduce.py",
"repo_name": "AmirAli-N/vscode",
"src_encoding": "UTF-8",
"text": "from mrjob.job import MRJob\r\nfrom mrjob.step import MRStep\r\n\r\nclass RatingsBreakdown(MRJob):\r\n\tdef steps(self):\r\n\t\treturn [MRStep(mapper=self.mapper_get_ratings, reducer=self.reducer_count_ratings)]\r\n\t\r\n\tdef mapper_get_ratings (self, -, line): #defining a mapper for MapReduce in Python. A mapper requires three input arguments: self is the object the function is instanced in, - usually unused in mapper but it could be a key comming from another reducer, line refers to the input line the function is applied on\r\n\t\t(userID, moveiID, rating, timestamp) = line.split('\\t')\r\n\t\tyield rating, 1 #returns only rating and 1 (it could be in a tuple)\r\n\t\r\n\tdef reducer_count_ratings (self, key, values): #reducer funtion is called for each unique keys, values refer to an iterable object associated with that key\r\n\t\tyield key, sum(values)\r\n\t\t\r\nif __name__=='__main__':\r\n\tRatingsBreakdown.run()\r\n\t\r\nclass MoviesSortedByCount(MRJob)\r\n\tdef steps(self): #defines a multisteps jobs (mapper, reducer)+(reducer)\r\n\t\treturn [MRStep(mapper=self.mapper_get_movieIDs, reducer=self.reducer_count_movieIDs), MRStep(reducer=self.reducer_sort_by_count)]\r\n\t\t\r\n\tdef mapper_get_movieIDs (self, -, line):\r\n\t\t(userID, movieID, rating, timestamp)=line.split('\\t')\r\n\t\tyield movieID, 1\r\n\t\r\n\tdef reducer_count_movieIDs (self, movieID, count):\r\n\t\tyield str(sum(count)).zfill(5), key #change the output to a string where it is filled by 0 from the left. This is intended to insure proper sorting (in hadoop streaming 10 before 2 because) in the shuffle and sort step of reducing in the next reducer function. \r\n\t\r\n\tdef reducer_sort_by_count (self, count, movies):\r\n\t\tfor movie in movies: #some movies have similar counts, so we have to iterate over the movies that have similar counts\r\n\t\t\tyield movie, count\r\n\r\nif __name__=='__main__':\r\n\tmoviesSortedByCount.run()"
},
{
"alpha_fraction": 0.7400850653648376,
"alphanum_fraction": 0.750775933265686,
"avg_line_length": 73.61739349365234,
"blob_id": "84d302ba074249560003258adcfc5f623f13d615",
"content_id": "95e297cd64551971ed8195f7e7b461904f86ac9a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8699,
"license_type": "no_license",
"max_line_length": 272,
"num_lines": 115,
"path": "/Cypress(PySpark).py",
"repo_name": "AmirAli-N/vscode",
"src_encoding": "UTF-8",
"text": "#In jupyter notebook using ! before keywords makes them shell commands\r\n\t!module list #list module like a shell command in terminal\r\n\t!cypress-kinit #initialize a cypress key\r\n\t!klist #shows validation period for the cypress key\r\n\t\r\n#Setting path and environment for spark in cypress cluster\r\n\timport sys\r\n\timport os\r\n\r\n\tsys.path.insert(0, '/usr/hdp/current/spark2-client/python')\r\n\tsys.path.insert(0, '/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip')\r\n\r\n\tos.environ['SPARK_HOME'] = '/usr/hdp/current/spark2-client/'\r\n\tos.environ['SPARK_CONF_DIR'] = '/etc/hadoop/synced_conf/spark2/'\r\n\tos.environ['PYSPARK_PYTHON'] = '/software/anaconda3/5.1.0/bin/python'\r\n\r\n#Spark application configuration setup\r\n\timport pyspark\r\n\tconf = pyspark.SparkConf() #initiate an object to be of type spark configuraton to set parameter for run a spark application\r\n\tconf.setMaster(\"yarn\") #YARN: yet another resource negotiator, setting the resource manager and job scheduler to yet\r\n\tconf.set(\"spark.yarn.queue\", \"interactive\") #this for cypress, runs the job on interactive mode in the yarn queue\r\n\tconf.set(\"spark.driver.memory\",\"4g\") #memory of spark driver mode, a resource consumed by wherever spark is running from\r\n\t#conf.set(\"spark.driver.core\",\"1\") #number of spark driver cores\r\n\t#conf.set(\"spark.driver.maxResultSize\", \"1g\") #Limit of total size of serialized results of all partitions for each Spark action\r\n\tconf.set(\"spark.executor.instances\", \"7\") #setting a multiplier for executer instances, executers run on the worker nodes\r\n\tconf.set(\"spark.executor.memory\",\"30g\") #in this case, 7*30 gigs is reserved for application executer memory\r\n\tconf.set(\"spark.executor.cores\",\"5\")\r\n\t##when running a spark application, a driver program is initiated, the spark context is initiated in the driver to set JVMs which are run be executers on \r\n\t##working nodes on Hadoop\r\n\tsc = 
pyspark.SparkContext(conf=conf) #initiates the spark context with a configuation object\r\n\r\n#Read a text file from hdfs\r\n\ttextFile = sc.textFile(\"/repository/gutenberg-shakespeare.txt\")\r\n\ttextFile = sc.textFile(\"\", minPartitions=5, use_unicode=True) #divides it to at least 5 partitions, and returns it as RDD of strings\r\n\ttextFile.getStorageLevel() #returns RDD's storage level, e.g., (FALSE, FALSE, FALSE, FALSE, 1) by answering the following questions:\r\n\t\t#Does RDD use disk?\r\n\t\t#Does RDD use memory?\r\n\t\t#Does RDD use off-heap memory?\r\n\t\t#Should an RDD be serialized (while persisting)?\r\n\t\t#How many replicas (default: 1) to use (can only be less than 40)?\r\n\ttextFile.getNumPartitions() #returns the number of partitions\r\n\r\n#RDD's recompute every time an action or transgformation are run. To persist RDD's on memory .cache and .persist may be used.\r\n\ttextFile.cache() #pins the textFile into memory only\r\n\ttextFile.persist(StorageLevel) #pins the textFile into storage, i.e., memory, disk, or off-heap memory\r\n\ttextFile.unpersist() #retires the RDD from memory\r\n\t\r\n\ttextFile.count() #count the number of words in the textFile\r\n#Queuing up some transformations functions, these transformations are lazy and will not run untill an action is called\r\n\twordcount = textFile.flatMap(lambda line: line.split(\" \")) \\#flatmap(f, preservesPartitioning=False), applies function 'f' to RDD, returns the flattened result as RDD\r\n .map(lambda word: (word, 1)) \\#map(f, preservesPartitioning=False), returns a new RDD by applying functon 'f' to each element\r\n .reduceByKey(lambda a, b: a + b)#reduceByKey(f, numPartitions) merges the data on each mapper and then reduces the data by key and associative function 'f'. 
'a' is the accumulated sum in each partition shuffling and 'b' is the current value for the same 'key'.\r\n\r\n#Anonymous functions\r\n\tlambda <args>: <expr> #this is a construct that works like a function but it is not defined with a name\r\n\tg=lambda x: x**2\t\t\t\t\t\r\n\tprint (g(2))\r\n\t#which is equivalent to\r\n\tdef g(x):\r\n\t\treturn x**2\r\n\tprint(g(2))\r\n\r\n#Call an action that initiates the transformations\r\n\twordcount.saveAsTextFile(\"intro-to-spark/output-wordcount-01\") #saves the RDD as a text file\r\n\twordcount.take(20) #show the first 20 elements of the RDD\r\n\r\n#challenge: remove punctuation, lowercase, and remove spaces\r\n##this is the step by step approach\r\n\twordcount_challenge_step01=textFile.flatMap(lambda line: line.split(\" \")) #return a RDD with strings separated by a space\r\n\t#in the returned RDD, lots of '', i.e., empty strings, are counted as strings separated by a space\r\n\twordcount_challenge_step02=wordcount_challenge_step01.filter(lambda word: [word] if word!='' else []) #filters the RDD for elements equal to empty strip\r\n\tdef rm_punc_case(s_word): #define a custom function for later map transformations\r\n\t\ts_word=s_word.translate(str.maketrans(\"\", \"\", string.punctuation)) #translate function return a string in which all characters replaces using a table\r\n\t\t#maketrans(\"\", \"\", string.punctuation) maps every punctuation to none\r\n\t\ts_word=s_word.lower() #change the case of every letter to lower case\r\n\t\treturn (s_word, 1) #wraps the lower cased string into a tuple\r\n\twordcount_challenge_step03=wordcount_challenge_step02.map(rm_punc_case) #return a RDD after applying the rm_punc_case_space function\r\n\twordcount_challenge_step04=wordcount_challenge_step03.reduceByKey(lambda accum, n:accum+n) #reduce the RDD by key, accum is the current sum, and n is the current value (which is always 1)\r\n\t#tests show there are still 324 empty strings, i.e., '', in the 
RDD.\r\n\twordcount_cleaned_emptyspace=wordcount_challenge_step04.filter(lambda value: value if value[0]!='' else []) #filters the RDD, value is now a tuple with two elements, so we check if the first element is an empty string\r\n\twordcount_emptyspace=wordcount_cleaned_emptyspace.filter(lambda value: value[0]=='') #check to seee if there are any empty strings left\r\n\twordcount_emptyspace.take(1) #returns a tuple of empty string and its count\r\n\r\n#challenge: find the highest average rated movies and its genre\r\n\tratings = sc.textFile(\"/repository/movielens/ratings.csv\")\r\n\tratings.cache()\r\n\tratingHeader = ratings.first() #extract the first row, header\r\n\tratingsOnly = ratings.filter(lambda x: x != ratingHeader)\r\n\tmovieRatings=ratingsOnly.map(lambda line: line.split(\",\")) #RDD of lists[userId, movieId, rating, timeStamp]\r\n\tmovieRatings=movieRatings.map(lambda lst: (lst[1], float(lst[2]))) #RDD of tuples(movieId, rating), changing the second element to a float\r\n\tmovieRatings_sum=movieRatings.reduceByKey(lambda accum, n: accum+n) #RDD of tuples(movieId, ratings sum)\r\n\tmovieCount=movieRatings.countByKey() #countByKey returns a dictionary of keys and number of replications\r\n\tmovieRatings_avg=movieRatings_sum.map(lambda value: (value[0], value[1]/movieCount[value[0]]))#divides the sum stored in the second element of the tuple by the count of movieCount. value[0] identifies the key, i.e., movieId\r\n\tmovieRatings_sorted=movieRatings_avg.sortBy(lambda value: value[1], ascending=False) #sorts the RDD by value of the average rating stored in the second element of the tuple. It is sorted from largest to smallest\r\n\tgenres=sc.textFile(\"/repository/movielens/movies.csv\")\r\n\tgenres=genres.map(lambda line: line.split(\",\"))\r\n\tmovie_genre=genres.join(movieRatings_sorted) #joins the two RDD by key\r\n\t##there is a problem here. Observe that some movie titles in the genres RDD have commas in their record. 
Therefore, when joined, some part of the movie title after comma is mistaken for the genre of the movie\r\n#there is another way to calculate the average rating\r\n\tratings = sc.textFile(\"/repository/movielens/ratings.csv\")\r\n\tratings.cache()\r\n\tratingHeader = ratings.first() #extract the first row, header\r\n\tratingsOnly = ratings.filter(lambda x: x != ratingHeader)\r\n\tmovieRatings=ratingsOnly.map(lambda line: line.split(\",\")) #RDD of lists[userId, movieId, rating, timeStamp]\r\n\tmovieRatings=movieRatings.map(lambda lst: (lst[1], float(lst[2]))) #RDD of tuples(movieId, rating), changing the second element to a float\r\n\tgroupByKeyRatings = movieRatings.groupByKey() #groupByKey returns an RDD where elements are grouped by key as iterable object\r\n\t\t#it can be changed into a list by\r\n\t\tgroupByKeyRatings.groupByKey().mapValues(list)\r\n\t\tavgRatings = groupByKeyRatings.mapValues(lambda V: sum(V) / float(len(V)))\r\n#another way to sort the data is to show the data as sorted, after joining the two RDDs\r\n\tmovie_genre.takeOrdered(10, key = lambda x : -x[1][0]) #10 shows the first 10 element\r\n\t#recall that when joined the elements look like ('movieId', (rating, 'genres'))\r\n\t#x[1][0] points to the rating in the above structure\r\n\t#-x[1][0] descending sort according to x[1][0]\r\n\t#x[1][0] ascending sort according to x[1][0]\r\n\r\n\t"
}
] | 7 |
ciampluca/unsupervised_counting | https://github.com/ciampluca/unsupervised_counting | 3a3514b2305b4d88102fddf64f9d5145d1187003 | 4445d48f68da75359643bcf3003e90ef61d817e3 | d3ddb8f6af66b74d4fe65a6e16b323923c0a7d3d | refs/heads/master | 2023-03-10T03:45:47.722755 | 2021-03-05T08:08:08 | 2021-03-05T08:08:08 | 280,446,562 | 0 | 1 | null | null | null | null | null | [
{
"alpha_fraction": 0.7969231009483337,
"alphanum_fraction": 0.8092307448387146,
"avg_line_length": 53.16666793823242,
"blob_id": "7cc18bd08edb924d26b0db6087d974ff7088661c",
"content_id": "4861f0625b6e5bdd4ead2b0aaf1a84aa7d0515a0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 325,
"license_type": "permissive",
"max_line_length": 169,
"num_lines": 6,
"path": "/README.md",
"repo_name": "ciampluca/unsupervised_counting",
"src_encoding": "UTF-8",
"text": "# UDA for Traffic Density Estimation and Counting\nCode and Resources for the traffic density estimation method from \"Unsupervised Domain Adaptation for Traffic Density Estimation and Counting,\" presented at VISAPP 2021.\n\n\n\nFurther instructions and code are coming soon...\n"
},
{
"alpha_fraction": 0.6054732203483582,
"alphanum_fraction": 0.6134549379348755,
"avg_line_length": 43.37202453613281,
"blob_id": "adfec997ca3b99a9807290b1a30cacc3c3ce9078",
"content_id": "4e8202c6ef874da55035ced0f26a805a0de898da",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14909,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 336,
"path": "/train_adv.py",
"repo_name": "ciampluca/unsupervised_counting",
"src_encoding": "UTF-8",
"text": "import sys\nimport tqdm\nimport os\n\nimport torch\nimport torch.nn as nn\nfrom torch.utils.data import DataLoader\nfrom torch.utils.tensorboard import SummaryWriter\nfrom torch.utils.data.sampler import WeightedRandomSampler\nfrom torchvision.transforms.functional import normalize\n\nfrom config import Config\nfrom models.CSRNet import CSRNet\nfrom models.discriminator import FCDiscriminator\nfrom utils.utils import random_seed, get_transforms, compute_discriminator_accuracy\nfrom datasets.NDISPark import NDISPark\n\nimport warnings\nwarnings.filterwarnings(\"ignore\", category=UserWarning)\n\n# Config default values\nEPOCHS = 300\nBATCH_SIZE = 1\nROOT_DATASET = \"/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark\"\nROOT_VAL_DATASET = \"/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark\"\nLAMBDA_ADV_LOSS = 5e-05\nLAMBDA_DISC_LOSS = 0.0001\n\n\ndef main(args):\n print(args)\n\n # Loading configuration\n cfg = Config(\n epochs=args.epochs,\n batch_size=args.batch_size,\n root_dataset=args.source_dataset_path,\n root_val_dataset=args.target_dataset_path,\n lambda_adv_loss=args.lambda_adv,\n lambda_disc_loss=args.lambda_disc,\n )\n\n # Reproducibility\n seed = cfg.seed\n if torch.cuda.is_available():\n random_seed(seed, True)\n else:\n random_seed(seed, False)\n\n # Defining exp name\n exp_name = \"_Train{}_Val{}_{}_advLoss{}_discLoss{}_lr{}_batchSize{}\".\\\n format(cfg.root_dataset.rsplit(\"/\", 1)[1], cfg.root_val_dataset.rsplit(\"/\", 1)[1], cfg.model_name,\n cfg.lambda_adv_loss, cfg.lambda_disc_loss, cfg.lr_base, cfg.batch_size)\n\n # Creating tensorboard writer\n tensorboard_writer = SummaryWriter(comment=exp_name)\n\n # Loading model\n model = CSRNet().to(cfg.device)\n\n # Loading discriminator\n discriminator = FCDiscriminator(num_classes=1).to(cfg.device)\n\n # Defining criterion and optimizer for the model\n criterion = nn.MSELoss()\n optimizer = torch.optim.Adam(\n model.parameters(),\n lr=cfg.lr_base,\n )\n\n # Defining criterion 
and optimizer for the discriminator\n discriminator_criterion = nn.BCEWithLogitsLoss()\n discriminator_optimizer = torch.optim.Adam(\n params=discriminator.parameters(),\n lr=cfg.discriminator_lr_base,\n betas=(0.9, 0.99),\n )\n\n # Creating datasets\n train_dataset = NDISPark(\n root_dataset=cfg.root_dataset,\n phase=\"source\",\n transform=get_transforms(general_transforms=True, train=True),\n img_transform=get_transforms(img_transforms=True),\n target_transform=get_transforms(target_transforms=True),\n )\n val_dataset = NDISPark(\n root_dataset=cfg.root_val_dataset,\n phase=\"target\",\n transform=get_transforms(general_transforms=True),\n img_transform=get_transforms(img_transforms=True),\n target_transform=get_transforms(target_transforms=True),\n )\n target_dataset = NDISPark(\n root_dataset=cfg.root_val_dataset,\n phase=\"target\",\n transform=get_transforms(general_transforms=True),\n img_transform=get_transforms(img_transforms=True),\n target_transform=get_transforms(target_transforms=True),\n )\n\n # Creating samplers for target dataloader\n weights = [1.0] * len(target_dataset)\n target_sampler = WeightedRandomSampler(\n weights=weights,\n num_samples=len(train_dataset),\n replacement=True\n )\n\n # Creating dataloaders\n train_dataloader = DataLoader(\n train_dataset,\n shuffle=True,\n batch_size=cfg.batch_size,\n num_workers=cfg.num_workers,\n pin_memory=torch.cuda.is_available(),\n )\n target_dataloader = DataLoader(\n target_dataset,\n batch_size=cfg.batch_size,\n sampler=target_sampler,\n pin_memory=torch.cuda.is_available(),\n num_workers=cfg.num_workers,\n )\n val_dataloader = DataLoader(\n val_dataset,\n shuffle=False,\n batch_size=1,\n num_workers=cfg.num_workers,\n pin_memory=torch.cuda.is_available(),\n )\n\n # Defining labels for adversarial training\n source_label = 0\n target_label = 1\n\n min_mae, min_mse, min_are = sys.maxsize, sys.maxsize, sys.maxsize\n min_mae_epoch, min_mse_epoch, min_are_epoch = -1, -1, -1\n # Iterating over 
epochs...\n for epoch in range(1, cfg.epochs):\n model.train()\n discriminator.train()\n epoch_loss, disc_epoch_loss, model_epoch_loss, adv_epoch_loss = 0.0, 0.0, 0.0, 0.0\n epoch_mae, epoch_mse, epoch_are = 0.0, 0.0, 0.0\n epoch_disc_adv_acc, epoch_disc_1_acc, epoch_disc_2_acc = 0.0, 0.0, 0.0\n\n # Creating an iterator over the target dataloader\n target_iterator = iter(target_dataloader)\n\n # Training for one epoch\n for i, source_data in enumerate(tqdm.tqdm(train_dataloader)):\n # Setting grads to zero\n optimizer.zero_grad()\n discriminator_optimizer.zero_grad()\n\n ######################\n # Training the model #\n ######################\n\n # Don't accumulate grads in Discriminator\n for param in discriminator.parameters():\n param.requires_grad = False\n\n # TRAINING WITH SOURCE LABELED IMAGE\n # Retrieving source image and gt\n source_image = source_data['image'].to(cfg.device)\n source_gt_density_map = source_data['densitymap'].to(cfg.device)\n # Computing pred density map\n source_pred_density_map = model(source_image)\n # Computing loss\n source_loss = criterion(source_pred_density_map, source_gt_density_map)\n source_loss.backward()\n model_epoch_loss += source_loss.item()\n\n # Computing MAE, MSE and ARE\n mae = abs(source_pred_density_map.data.sum() - source_gt_density_map.data.sum())\n epoch_mae += mae.item()\n mse = (source_pred_density_map.data.sum() - source_gt_density_map.data.sum()) ** 2\n epoch_mse += mse.item()\n are = abs(source_pred_density_map.data.sum() - source_gt_density_map.data.sum()) / torch.clamp(\n source_gt_density_map.data.sum(), min=1)\n epoch_are += are.item()\n\n # TRAINING WITH TARGET UNLABELED IMAGE (ADV LOSS)\n # Retrieving target image\n target_data = target_iterator.__next__()\n target_image = target_data['image'].to(cfg.device)\n # Computing pred density map\n target_pred_density_map = model(target_image)\n # Computing output of the discriminator\n discriminator_pred = discriminator(target_pred_density_map)\n # Computing 
adv loss (between discriminator prediction and source-values label)\n source_values_label = torch.FloatTensor(discriminator_pred.data.size()).fill_(source_label).to(cfg.device)\n adv_loss = discriminator_criterion(discriminator_pred, source_values_label)\n adv_loss = cfg.lambda_adv_loss * adv_loss\n adv_loss.backward()\n adv_epoch_loss += adv_loss.item()\n\n # Computing accuracy of the discriminator\n disc_adv_acc = compute_discriminator_accuracy(source_values_label, discriminator_pred, cfg)\n epoch_disc_adv_acc += disc_adv_acc\n\n # Computing total loss and backwarding it\n loss = source_loss + adv_loss\n epoch_loss += loss.item()\n # loss.backward()\n optimizer.step()\n\n ##############################\n # Training the discriminator #\n ##############################\n\n # Bringing back requires_grad\n for param in discriminator.parameters():\n param.requires_grad = True\n\n # TRAINING WITH SOURCE LABELED IMAGE\n # Computing output of the discriminator\n source_pred_density_map = source_pred_density_map.detach()\n discriminator_pred = discriminator(source_pred_density_map)\n # Computing discriminator loss (between discriminator prediction and source-values label)\n source_values_label = torch.FloatTensor(discriminator_pred.data.size()).fill_(source_label).to(cfg.device)\n disc_loss = discriminator_criterion(discriminator_pred, source_values_label)\n disc_loss = cfg.lambda_disc_loss * disc_loss\n # Computing accuracy of the discriminator\n disc_1_acc = compute_discriminator_accuracy(source_values_label, discriminator_pred, cfg)\n epoch_disc_1_acc += disc_1_acc\n # Backwarding loss\n disc_epoch_loss += disc_loss.item()\n disc_loss.backward()\n\n # TRAINING WITH TARGET UNLABELED IMAGE\n # Computing output of the discriminator\n target_pred_density_map = target_pred_density_map.detach()\n discriminator_pred = discriminator(target_pred_density_map)\n # Computing discriminator loss (between discriminator prediction and target-values label)\n target_values_label = 
torch.FloatTensor(discriminator_pred.data.size()).fill_(target_label).to(cfg.device)\n disc_loss = discriminator_criterion(discriminator_pred, target_values_label)\n disc_loss = cfg.lambda_disc_loss * disc_loss\n # Computing accuracy of the discriminator\n disc_2_acc = compute_discriminator_accuracy(target_values_label, discriminator_pred, cfg)\n epoch_disc_2_acc += disc_2_acc\n # Backwarding loss\n disc_epoch_loss += disc_loss.item()\n disc_loss.backward()\n\n # Performing optimizer step\n discriminator_optimizer.step()\n\n tensorboard_writer.add_scalar('Train/Loss', epoch_loss / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/Disc_Loss', disc_epoch_loss / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/MAE', epoch_mae / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/MSE', epoch_mse / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/ARE', epoch_are / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/Discr_Adv_Acc', epoch_disc_adv_acc / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/Discr_1_Acc', epoch_disc_1_acc / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/Discr_2_Acc', epoch_disc_2_acc / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/Model_Loss', model_epoch_loss / len(train_dataset), epoch)\n tensorboard_writer.add_scalar('Train/Adv_Loss', adv_epoch_loss / len(train_dataset), epoch)\n\n # Validate the epoch\n model.eval()\n with torch.no_grad():\n epoch_mae, epoch_mse, epoch_are, epoch_loss = 0.0, 0.0, 0.0, 0.0\n\n for i, data in enumerate(tqdm.tqdm(val_dataloader)):\n # Retrieving image and density map\n image = data['image'].to(cfg.device)\n gt_density_map = data['densitymap'].to(cfg.device)\n\n # Computing output and val loss\n pred_density_map = model(image)\n val_loss = criterion(pred_density_map, gt_density_map)\n epoch_loss += val_loss.item()\n pred_density_map = pred_density_map.detach()\n\n # Computing MAE 
and MSE\n mae = abs(pred_density_map.data.sum() - gt_density_map.data.sum())\n epoch_mae += mae.item()\n mse = (pred_density_map.data.sum() - gt_density_map.data.sum()) ** 2\n epoch_mse += mse.item()\n are = abs(pred_density_map.data.sum() - gt_density_map.data.sum()) / torch.clamp(\n gt_density_map.data.sum(), min=1)\n epoch_are += are.item()\n\n epoch_mae /= len(val_dataset)\n epoch_mse /= len(val_dataset)\n epoch_are /= len(val_dataset)\n epoch_loss /= len(val_dataset)\n\n # Saving last model\n torch.save(model.state_dict(), os.path.join(cfg.checkpoint_folder, \"last.pth\"))\n # Eventually saving best models\n if epoch_mae < min_mae:\n min_mae, min_mae_epoch = epoch_mae, epoch\n torch.save(model.state_dict(), os.path.join(cfg.checkpoint_folder, str(epoch) + \"_mae.pth\"))\n if epoch_mse < min_mse:\n min_mse, min_mse_epoch = epoch_mse, epoch\n torch.save(model.state_dict(), os.path.join(cfg.checkpoint_folder, str(epoch) + \"_mse.pth\"))\n if epoch_are < min_are:\n min_are, min_are_epoch = epoch_are, epoch\n torch.save(model.state_dict(), os.path.join(cfg.checkpoint_folder, str(epoch) + \"_are.pth\"))\n print('Epoch ', epoch, ' MAE: ', epoch_mae, ' Min MAE: ', min_mae, ' Min Epoch: ', min_mae_epoch,\n min_mae_epoch, 'MSE: ', epoch_mse, 'ARE: ', epoch_are)\n\n tensorboard_writer.add_scalar('Val/MAE', epoch_mae, epoch)\n tensorboard_writer.add_scalar('Val/MSE', epoch_mse, epoch)\n tensorboard_writer.add_scalar('Val/ARE', epoch_are, epoch)\n tensorboard_writer.add_scalar('Val/Loss', epoch_loss, epoch)\n tensorboard_writer.add_image(str(epoch) + '/Image',\n normalize(image.cpu().squeeze(dim=0),\n mean=[-0.5 / 0.225, -0.5 / 0.225, -0.5 / 0.225],\n std=[1 / 0.225, 1 / 0.225, 1 / 0.225]))\n tensorboard_writer.add_image(\n str(epoch) + '/Pred Count:' + str('%.2f' % (pred_density_map.cpu().squeeze(dim=0).sum())),\n torch.abs(pred_density_map.squeeze(dim=0)) / torch.max(pred_density_map.squeeze(dim=0)))\n tensorboard_writer.add_image(\n str(epoch) + '/GT count:' + 
str('%.2f' % (gt_density_map.cpu().squeeze(dim=0).sum())),\n gt_density_map.squeeze(dim=0) / torch.max(gt_density_map.squeeze(dim=0)))\n\n\nif __name__ == \"__main__\":\n import argparse\n\n parser = argparse.ArgumentParser(description=__doc__)\n\n parser.add_argument('--source-dataset-path', default=ROOT_DATASET, help='source dataset root path')\n parser.add_argument('--target-dataset-path', default=ROOT_VAL_DATASET, help='target dataset root path')\n parser.add_argument('--epochs', default=EPOCHS, type=int, help='number of total epochs to run')\n parser.add_argument('-b', '--batch-size', default=BATCH_SIZE, type=int, help='batch_size')\n parser.add_argument('--lambda-adv', default=LAMBDA_ADV_LOSS, type=float, help='lambda for the adv loss')\n parser.add_argument('--lambda-disc', default=LAMBDA_DISC_LOSS, type=float, help='lambda for the discriminator loss')\n\n args = parser.parse_args()\n\n main(args)\n"
},
{
"alpha_fraction": 0.5648686289787292,
"alphanum_fraction": 0.5756968259811401,
"avg_line_length": 37.9375,
"blob_id": "c58ca9b04021c0f274c69a77a3c6a2f6257b2f95",
"content_id": "452a7f9c82003e2d7bb68d092d28dc2ce7fe63b0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4987,
"license_type": "permissive",
"max_line_length": 122,
"num_lines": 128,
"path": "/datasets/NDISPark.py",
"repo_name": "ciampluca/unsupervised_counting",
"src_encoding": "UTF-8",
"text": "import os\nfrom PIL import Image\nimport numpy as np\n\nimport torch\nfrom torch.utils.data import Dataset\nfrom torch.utils.data import DataLoader\nfrom torchvision.transforms.functional import to_pil_image, normalize\n\nfrom utils.utils import get_transforms\n\n\nclass NDISPark(Dataset):\n\n def __init__(self, root_dataset, phase=\"source\", transform=None, img_transform=None, target_transform=None):\n assert phase == \"source\" or phase == \"target\" or phase == \"test\", \"phase not present\"\n\n self.imgs_path = os.path.join(root_dataset, phase + '_data/images')\n self.densities_path = os.path.join(root_dataset, phase + '_data/densitymaps')\n self.data_files = [filename for filename in os.listdir(self.imgs_path)\n if os.path.isfile(os.path.join(self.imgs_path, filename))]\n self.transform = transform\n self.img_transform = img_transform\n self.target_transform = target_transform\n self.phase = phase\n\n # We just need number of vehicles present in the images\n if phase == \"test\":\n self.gt = {}\n gt_txt_path = os.path.join(root_dataset, phase + \"_data\", \"test_counting_gt.txt\")\n with open(gt_txt_path) as f:\n content = f.readlines()\n content = [x.strip() for x in content]\n content = content[:-1]\n for line in content:\n (key, val) = line.split()\n self.gt[key] = float(val)\n\n def __len__(self):\n return len(self.data_files)\n\n def __getitem__(self, index):\n index = index % len(self.data_files)\n fname = self.data_files[index]\n\n # Loading image\n img = Image.open(os.path.join(self.imgs_path, fname))\n if img.mode == 'L' or img.mode == 'RGBA':\n img = img.convert('RGB')\n\n # Loading density map. 
If we are in the test phase we just need the total number of vehicles, so density\n # maps are just fake black images\n if self.phase == \"test\":\n den_map = Image.new('F', img.size)\n else:\n den_map = Image.open(os.path.join(self.densities_path, fname.rsplit(\".\", 1)[0] + \".tiff\"))\n\n if self.transform is not None:\n img, den_map = self.transform((img, den_map))\n if self.img_transform is not None:\n img = self.img_transform(img)\n if self.target_transform is not None:\n den_map = self.target_transform(den_map)\n\n if self.phase == \"test\":\n # Retrieving gt number of vehicles\n key = fname.rsplit(\".\", 1)[0]\n num = self.gt.get(key)\n return {'image': img, 'densitymap': den_map, 'name': fname, 'num': num}\n else:\n return {'image': img, 'densitymap': den_map, 'name': fname}\n\n\n# # Testing code\n# if __name__ == \"__main__\":\n# root = \"/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark\"\n# root_val = \"/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark\"\n# phase = \"target\"\n# DIM_RESIZE = None\n#\n# train_dataset = NDISPark(\n# root_dataset=root,\n# transform=get_transforms(general_transforms=True, train=True, dim_resize=DIM_RESIZE),\n# img_transform=get_transforms(img_transforms=True),\n# target_transform=get_transforms(target_transforms=True),\n# )\n# val_dataset = NDISPark(\n# root_dataset=root_val,\n# phase=phase,\n# transform=get_transforms(general_transforms=True, dim_resize=DIM_RESIZE),\n# img_transform=get_transforms(img_transforms=True,),\n# target_transform=get_transforms(target_transforms=True),\n# )\n#\n# train_dataloader = DataLoader(\n# train_dataset,\n# shuffle=False,\n# batch_size=1,\n# )\n# val_dataloader = DataLoader(\n# val_dataset,\n# shuffle=False,\n# batch_size=1,\n# num_workers=1,\n# )\n#\n# for i, data in enumerate(train_dataloader):\n# name = data['name'][0].rsplit(\".\", 1)[0]\n# print(name)\n#\n# image = data['image'].squeeze(dim=0)\n# image = normalize(image, mean=[-0.5 / 0.225, -0.5 / 0.225, -0.5 / 
0.225], std=[1 / 0.225, 1 / 0.225, 1 / 0.225])\n# pil_image = to_pil_image(image)\n# pil_image.save(os.path.join(\"../output_debug/\", name + \".png\"))\n#\n# if phase == \"test\":\n# num = data['num'].cpu().item()\n# print(num)\n# else:\n# density_map = data['densitymap'].squeeze(dim=0)\n# pil_density_map = to_pil_image((density_map/torch.max(density_map))*255)\n# np_density_map = density_map.cpu().detach().numpy().astype(np.float32)\n# unique, counts = np.unique(np_density_map, return_counts=True)\n# num = np.sum(np_density_map)\n# num_double_check = len(unique)-1\n# pil_density_map.save(os.path.join(\"../output_debug/\", \"density_\" + name + \".tiff\"))\n# print(num)\n# print(num_double_check)\n\n\n\n"
},
{
"alpha_fraction": 0.6489594578742981,
"alphanum_fraction": 0.6626505851745605,
"avg_line_length": 49.69444274902344,
"blob_id": "c38f8e43fbd9f3cb081f76df98bbb3f22065c1a5",
"content_id": "5e9bd519e4de79dd4356da02e19e49f407fb5907",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1826,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 36,
"path": "/config.py",
"repo_name": "ciampluca/unsupervised_counting",
"src_encoding": "UTF-8",
"text": "import os\nimport time\n\nimport torch\n\n\nclass Config:\n\n def __init__(self, batch_size=None, lr_base=None, discriminator_lr_base=None, epochs=None, root_dataset=None,\n root_val_dataset=None, checkpoint_folder=None, momentum=0.9, weight_decay=0.0001, model_name=None,\n input_dim_resize=480, num_workers=4, lambda_adv_loss=0, lambda_disc_loss=0, dataset_random_split=None,\n dataset_roi_masked=None, seed=10):\n\n self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')\n self.batch_size = 1 if not batch_size else batch_size\n self.lr_base = 1e-5 if not lr_base else lr_base\n self.discriminator_lr_base = 1e-5 if not discriminator_lr_base else discriminator_lr_base\n self.momentum = momentum\n self.dataset_random_split = dataset_random_split\n self.dataset_roi_masked = dataset_roi_masked\n self.weight_decay = weight_decay\n self.lambda_adv_loss = lambda_adv_loss\n self.lambda_disc_loss = lambda_disc_loss\n self.input_dim_resize = input_dim_resize\n self.epochs = 100 if not epochs else epochs\n self.num_workers = num_workers\n self.root_dataset = './data/NDISPark' if not root_dataset else root_dataset\n self.root_val_dataset = './data/NDISPark' if not root_val_dataset else root_val_dataset\n self.dataset_name = self.root_dataset.rsplit(\"/\", 1)[1]\n self.model_name = 'CSRNet' if not model_name else model_name\n self.date_and_time = time.strftime(\"%Y%m%d%H%M\")\n self.checkpoint_folder = os.path.join('./checkpoints', self.dataset_name, self.model_name, self.date_and_time) \\\n if not checkpoint_folder else checkpoint_folder\n self.seed = seed\n\n os.makedirs(self.checkpoint_folder, exist_ok=True)\n\n"
},
{
"alpha_fraction": 0.5773895978927612,
"alphanum_fraction": 0.5846676230430603,
"avg_line_length": 26.851350784301758,
"blob_id": "b6118ae13aa19caf579a25bfb54b6d286fe7e075",
"content_id": "33d6ba41c46a2a94a9461c46e88b3f484cb08ad8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2061,
"license_type": "permissive",
"max_line_length": 101,
"num_lines": 74,
"path": "/utils/transforms.py",
"repo_name": "ciampluca/unsupervised_counting",
"src_encoding": "UTF-8",
"text": "import random\nfrom PIL import Image\nimport numpy as np\n\nimport torchvision.transforms.functional as F\n\n\nclass RandomHorizontalFlip(object):\n\n def __call__(self, img_and_density):\n \"\"\"\n img: PIL.Image\n img_and_density: PIL.Image\n \"\"\"\n img, density_map = img_and_density\n\n if random.random() < 0.5:\n return img.transpose(Image.FLIP_LEFT_RIGHT), density_map.transpose(Image.FLIP_LEFT_RIGHT)\n else:\n return img, density_map\n\n\nclass PairedCrop(object):\n \"\"\"\n Paired Crop for both image and its density map.\n Note that due to the maxpooling in the neural network,\n we must promise that the size of input image is the corresponding factor.\n \"\"\"\n\n def __init__(self, factor=16):\n self.factor = factor\n\n @staticmethod\n def get_params(img, factor):\n w, h = img.size\n if w % factor == 0 and h % factor == 0:\n return 0, 0, h, w\n else:\n return 0, 0, h - (h % factor), w - (w % factor)\n\n def __call__(self, img_and_density):\n \"\"\"\n img_and_density: PIL.Image\n \"\"\"\n img, density_map = img_and_density\n\n i, j, th, tw = self.get_params(img, self.factor)\n\n img = F.crop(img, i, j, th, tw)\n density_map = F.crop(density_map, i, j, th, tw)\n\n return img, density_map\n\n\nclass CustomResize(object):\n\n def __init__(self, dim=480):\n self.dim = dim\n\n def __call__(self, img_and_density):\n img, density_map = img_and_density\n np_den_map = np.array(density_map)\n num_objs = np.sum(np_den_map)\n\n img = F.resize(img, size=self.dim, interpolation=Image.ANTIALIAS)\n density_map = F.resize(density_map, size=self.dim, interpolation=Image.NEAREST)\n\n # Ensure that sum=#objects after resizing\n np_den_map = np.array(density_map)\n if np.sum(np_den_map) != 0.0:\n np_den_map = num_objs * np_den_map / np.sum(np_den_map)\n density_map = Image.fromarray(np_den_map, mode=\"F\")\n\n return img, density_map\n"
},
{
"alpha_fraction": 0.5900337100028992,
"alphanum_fraction": 0.6003851890563965,
"avg_line_length": 37.82242965698242,
"blob_id": "afd86796b857a2e89747a33b828ed67d0df8411f",
"content_id": "8549afde25a00c0631ef01d0361975583f2d899d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4154,
"license_type": "permissive",
"max_line_length": 116,
"num_lines": 107,
"path": "/test.py",
"repo_name": "ciampluca/unsupervised_counting",
"src_encoding": "UTF-8",
"text": "import os\nimport tqdm\nfrom PIL import Image\nimport numpy as np\n\nimport torch\nfrom torch.utils.data import DataLoader\n\nfrom models.CSRNet import CSRNet\nfrom datasets.NDISPark import NDISPark\nfrom utils.utils import get_transforms\n\n\n# Parameters\nROOT_DATASET = \"/media/luca/Dati_2_SSD/datasets/vehicles_counting/NDISPark\"\nPHASE = \"test\"\nMODEL_NAME = \"CSRNet\"\nMODEL_CHECKPOINT = \"/home/luca/workspace/unsupervised_counting/checkpoints/NDISPark/CSRNet/202008041749/74_mae.pth\"\nGT_TXT_FILE = True\nRESULTS = \"/home/luca/Downloads/temp_results/NDISPark/basic/results\"\nPREDS = \"/home/luca/Downloads/temp_results/NDISPark/basic/preds\"\n\n\ndef main():\n torch.backends.cudnn.enabled = False\n device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')\n\n # Creating output folder\n preds_output_folder = os.path.join(PREDS, \"obtained_with_best_model_mae\")\n if not os.path.exists(preds_output_folder):\n os.makedirs(preds_output_folder)\n\n # Loading model\n model = CSRNet()\n\n # Loading checkpoint\n model.load_state_dict(torch.load(MODEL_CHECKPOINT))\n model.to(device)\n model.eval()\n\n dataset = NDISPark(\n root_dataset=ROOT_DATASET,\n phase=PHASE,\n transform=get_transforms(general_transforms=True),\n img_transform=get_transforms(img_transforms=True),\n target_transform=get_transforms(target_transforms=True),\n )\n\n dataloader = DataLoader(\n dataset,\n shuffle=False,\n batch_size=1,\n num_workers=1,\n pin_memory=torch.cuda.is_available(),\n )\n\n total_mae, total_mse, total_are = 0.0, 0.0, 0.0\n with torch.no_grad():\n for i, data in enumerate(tqdm.tqdm(dataloader)):\n # Retrieving image and density map\n image = data['image'].to(device)\n gt_density_map = data['densitymap'].to(device)\n\n # Computing pred density map\n pred_density_map = model(image)\n\n # Computing MAE, MSE and ARE\n if GT_TXT_FILE:\n gt_num = data['num'].cpu().item()\n mae = abs(pred_density_map.data.sum() - gt_num)\n total_mae += mae.item()\n 
mse = (pred_density_map.data.sum() - gt_num) ** 2\n total_mse += mse.item()\n are = abs(pred_density_map.data.sum() - gt_num) / gt_num\n total_are += are.item()\n else:\n mae = abs(pred_density_map.data.sum() - gt_density_map.data.sum())\n total_mae += mae.item()\n mse = (pred_density_map.data.sum() - gt_density_map.data.sum()) ** 2\n total_mse += mse.item()\n are = abs(pred_density_map.data.sum() - gt_density_map.data.sum()) / torch.clamp(\n gt_density_map.data.sum(), min=1)\n total_are += are.item()\n\n density_to_save = pred_density_map.detach()\n density_to_save = density_to_save.squeeze(0).squeeze(0).cpu().numpy()\n\n density_to_save = np.absolute(density_to_save)\n density_to_save = 255 * (density_to_save / np.max(density_to_save))\n density_to_save = density_to_save.astype(np.uint8)\n # density_to_save = (255 * (density_to_save - np.min(density_to_save)) / (\n # np.max(density_to_save) - np.min(density_to_save))).astype(np.uint8)\n pil_density = Image.fromarray(density_to_save)\n pil_density.save(os.path.join(preds_output_folder, data['name'][0].rsplit(\".\", 1)[0] + \".png\"))\n # pil_density.save(os.path.join(preds_output_folder, data['name'][0].rsplit(\".\", 1)[0] + \".tiff\"))\n\n print(\"Image: {}, AE: {}, SE: {}, RE: {}\".format(data['name'][0], mae.item(), mse.item(), are.item()))\n\n string_to_write = \"Model: {}, Checkpoint: {}, MAE: {}, MSE: {}, ARE: {}\".\\\n format(MODEL_NAME, MODEL_CHECKPOINT, total_mae/len(dataset), total_mse/len(dataset), total_are/len(dataset))\n with open(os.path.join(RESULTS, \"obtained_with_best_model_mae.txt\"), \"w\") as result_file:\n result_file.write(string_to_write)\n print(string_to_write)\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.6875945329666138,
"alphanum_fraction": 0.7019667029380798,
"avg_line_length": 32.04999923706055,
"blob_id": "7391c81b8009d7afc663e2a83dd73a1e56466628",
"content_id": "67e1ff8659502586de82c8a38618be2b6c879a50",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1322,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 40,
"path": "/utils/utils.py",
"repo_name": "ciampluca/unsupervised_counting",
"src_encoding": "UTF-8",
"text": "from PIL import Image\nimport numpy as np\n\nimport torch\nfrom torchvision.transforms import Compose, ToTensor, Normalize\nfrom utils.transforms import PairedCrop, RandomHorizontalFlip, CustomResize\n\n\ndef get_transforms(general_transforms=None, img_transforms=None, target_transforms=None, train=None, dim_resize=None):\n\n transforms_list = []\n\n if general_transforms:\n if dim_resize:\n transforms_list.append(CustomResize(dim=dim_resize))\n if train:\n transforms_list.append(RandomHorizontalFlip())\n transforms_list.append(PairedCrop())\n if img_transforms:\n transforms_list.append(ToTensor())\n transforms_list.append(Normalize(mean=[0.5, 0.5, 0.5], std=[0.225, 0.225, 0.225]))\n if target_transforms:\n transforms_list.append(ToTensor())\n\n return Compose(transforms_list)\n\n\ndef random_seed(seed_value, use_cuda):\n np.random.seed(seed_value) # cpu vars\n torch.manual_seed(seed_value) # cpu vars\n if use_cuda:\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False\n\n\ndef compute_discriminator_accuracy(label, pred, cfg):\n boolean_label = label.type(torch.BoolTensor).to(cfg.device)\n acc = torch.mean((torch.eq(torch.sigmoid(pred) > .5, boolean_label)).type(torch.FloatTensor))\n\n return acc\n"
}
] | 7 |
Tubbxl/Athena_Src | https://github.com/Tubbxl/Athena_Src | 5ad65686fd9fe5baed0dbda19f31c536f33e253d | 53d2c0a4829b6eff0443546da37a6461166cb6c7 | 304ea2162378f3d7bbdb5a95898bf6a4fdbbb9e3 | refs/heads/master | 2022-01-29T23:58:42.551053 | 2018-12-04T09:56:01 | 2018-12-04T09:56:01 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6582159399986267,
"alphanum_fraction": 0.668544590473175,
"avg_line_length": 19.423076629638672,
"blob_id": "f595dc6264a3588437532a1d5ea60f80f18058e7",
"content_id": "83d4730301159838aebae2ec854865b263bf65e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1073,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 52,
"path": "/athena/examples/LCM/Singlecar/control/common/get_time.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n/**\n * @file logging.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n#ifndef COMMON_GET_TIME_H_\n#define COMMON_GET_TIME_H_\n\n#include <time.h>\n#include <sys/time.h>\n\n/**\n* @namespace athena::control\n* @brief athena::control\n*/\nnamespace athena{\nnamespace control{\n/**\n * @class GetTime\n * @brief 获取时间.\n */\nclass GetTime{\npublic:\n GetTime() = default;\n ~GetTime() = default;\n\ntypedef struct{\n int year;\n int month;\n int day;\n int hour;\n int minute;\n int second;\n int millisecond;\n}TimeFormat;\n\nstatic TimeFormat gps_local_time_;\n\nstatic void GetGpsCurrentTime(int &year,int &month,int &day,int &hour,int &minute,int &second,int &millisecond);\n\nstatic void SetGpsCurrentUtcTime(int utc_year,int utc_month,int utc_day,int utc_hour,int utc_minute,int utc_second,int utc_millisecond);\n\nstatic void GetSystemTime(int &year,int &month,int &day,int &hour,int &minute,int &second,int &millisecond);\n\n};\n\n}//namespace control\n}//namespace athena\n#endif //COMMON_GET_TIME_H_\n\n\n"
},
{
"alpha_fraction": 0.5629228949546814,
"alphanum_fraction": 0.5683355927467346,
"avg_line_length": 20.114286422729492,
"blob_id": "3624c689eca84f5e14701c3c1d6575c28535b749",
"content_id": "a8271a7e2625084bae4c7f2d4f01f47eac25f8ed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 769,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 35,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/BranchLane.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "WINDOWS-1252",
"text": "#pragma once\n\nclass BranchLane\n{\nprivate:\n BOOL _bFlag;\t// ¨C3???E¡ªL???t?¡ë?O\n LaneMarker *_LaneMarkerMain;\t// ¨C{?¨¹¡®¡è¡±¡¯?¨¹\n LaneMarker *_LaneMarkerBranch;\t// ?a?¨°¡®¡è¡±¡¯?¨¹\npublic:\n BranchLane(void):_bFlag(FALSE), _LaneMarkerMain(NULL), _LaneMarkerBranch(NULL)\t{}\n void flag(BOOL v)\n {\n _bFlag = v;\n }\n BOOL flag(void)\n {\n return _bFlag;\n }\n LaneMarker *getLaneMarkerMain(void)\n {\n return _LaneMarkerMain;\n }\n void getLaneMarkerMain(LaneMarker *p)\n {\n _LaneMarkerMain = p;\n }\n LaneMarker *getLaneMarkerBranch(void)\n {\n return _LaneMarkerBranch;\n }\n void getLaneMarkerBranch(LaneMarker *p)\n {\n _LaneMarkerBranch = p;\n }\n};\n"
},
{
"alpha_fraction": 0.6296992301940918,
"alphanum_fraction": 0.6552631855010986,
"avg_line_length": 39.30303192138672,
"blob_id": "967742dbbaf87c2a40705cb37b0daa18eec43af6",
"content_id": "eb44cbb70dad72461c5b6aaa66bb326c5e2a03f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2692,
"license_type": "no_license",
"max_line_length": 205,
"num_lines": 66,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerPoint.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "WINDOWS-1252",
"text": "#pragma once\n#include \"../utils/type.h\"\n\nclass LaneMarkerPoint {\nprivate:\n\tint _iI;\t// 2???3??¡¯??¨¤?W[pix]\n\tint _iJ;\t// 2???3?¡???¨¤?W[pix]\n\tdouble _dEdgeStrength;\t// ?G?b?W?-¡°x\n\tdouble _dEdgeGradient;\t// ?G?b?W???¨¹[rad]\n\tint _iProcLineIndex;\n\tdouble _dDisparity;\t\t// ???¡¤[sub-pix]\n\tdouble _adPos3D[3];\t\t// 3???3?¨¤?W{[mm],[mm],[mm]}\n\tBOOL _bAboveGround;\t\t// ¡ª¡ì¡®¨¬?¡§???¨¬¡°_?t?¡ë?O\n\npublic:\n\tinline\tLaneMarkerPoint()\t: _iI(-1), _iJ(-1), _dEdgeStrength(-1), _dEdgeGradient(0), _iProcLineIndex(-1), _dDisparity(-1)\t{\t_adPos3D[0] = 0.0;\t_adPos3D[1] = 0.0;\t_adPos3D[2] = 0.0;\t_bAboveGround = FALSE;\t}\n\tinline LaneMarkerPoint(int iIsrc, int iJsrc, double dEdgeStrength, double dGrandient, int iProcLineIndex, double dDisparity, double dX = 0.0,\tdouble dY = 0.0, double dZ = 0.0)\t{\n\t\t_iI = iIsrc;\n\t\t_iJ = iJsrc;\n\t\t_dEdgeStrength = dEdgeStrength;\n\t\t_dEdgeGradient = dGrandient;\n\t\t_iProcLineIndex = iProcLineIndex;\n\t\t_dDisparity = dDisparity;\n\t\t_adPos3D[0] = dX;\n\t\t_adPos3D[1] = dY;\n\t\t_adPos3D[2] = dZ;\n\t\t_bAboveGround = FALSE;\n\t}\n\tinline LaneMarkerPoint(LaneMarkerPoint *pSrc)\t{\n\t\t_iI = pSrc->getIsrc();\n\t\t_iJ = pSrc->getJsrc();\n\t\t_dEdgeStrength = pSrc->getEdgeStrength();\n\t\t_dEdgeGradient = pSrc->getEdgeGradient();\n\t\t_iProcLineIndex = pSrc->getProcLineIndex();\n\t\t_dDisparity = pSrc->getDisparity();\n\t\t_adPos3D[0] = pSrc->X3D();\n\t\t_adPos3D[1] = pSrc->Y3D();\n\t\t_adPos3D[2] = pSrc->Z3D();\n\t\t_bAboveGround = pSrc->getAboveGround();\n\t}\n\n\tinline int getIsrc(void)\t\t\t\t{\treturn _iI;\t\t\t\t}\n\tinline void setIsrc(int iV)\t\t\t\t{\t_iI = iV;\t\t\t\t}\n\tinline int getJsrc(void)\t\t\t\t{\treturn _iJ;\t\t\t\t}\n\tinline void setJsrc(int iV)\t\t\t\t{\t_iJ = iV;\t\t\t\t}\n\tinline double getEdgeStrength(void)\t\t{\treturn _dEdgeStrength;\t}\n\tinline void setEdgeStrength(double dV)\t{\t_dEdgeStrength = dV;\t}\n\tinline double getEdgeGradient(void)\t\t{\treturn 
_dEdgeGradient;\t}\n\tinline void setEdgeGradient(double dV)\t{\t_dEdgeGradient = dV;\t}\n\tinline double getDisparity(void)\t\t{\treturn _dDisparity;\t\t}\n\tinline void setDisparity(double dV)\t\t{\t_dDisparity = dV;\t\t}\n\tinline int getProcLineIndex(void)\t\t{\treturn _iProcLineIndex;\t}\n\tinline void setProcLineIndex(int iV)\t{\t_iProcLineIndex = iV;\t}\n\n\tinline double *getPos3D(void)\t{\treturn _adPos3D;\t}\n\tinline double X3D(void)\t{\treturn _adPos3D[0];\t}\n\tinline double Y3D(void)\t{\treturn _adPos3D[1];\t}\n\tinline double Z3D(void)\t{\treturn _adPos3D[2];\t}\n\tinline void X3D(double dV)\t{\t_adPos3D[0] = dV;\t}\n\tinline void Y3D(double dV)\t{\t_adPos3D[1] = dV;\t}\n\tinline void Z3D(double dV)\t{\t_adPos3D[2] = dV;\t}\n\tinline BOOL getAboveGround(void)\t{\treturn _bAboveGround;\t}\n\tinline void setAboveGround(BOOL v)\t{\t_bAboveGround = v;\t}\n};\n\ntypedef LaneMarkerPoint * ptrLaneMarkerPoint;\n"
},
{
"alpha_fraction": 0.5296404361724854,
"alphanum_fraction": 0.5549076795578003,
"avg_line_length": 15.596774101257324,
"blob_id": "2a032b5e142a1bcbef02980f6637360cc34a2c89",
"content_id": "782fbe2f52974213ed0e404a3559ca4544c5bb3a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1317,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 62,
"path": "/athena/core/arm/Common/include/distributed_runtime/timer/nad_timer.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_timer.h\n * 时 间:2016-03-02\n * 描 述:定时器的基类\n-------------------------------------------------------*/\n#ifndef _nad_timer_H\n#define _nad_timer_H\n\n#include \"nad_base.h\"\n//定时器的基类\nclass nad_timer\n{\npublic:\n //定时器时间间隔\n int64 interval_ms;\n\n //上次handle的时间\n int64 last_ms;\n\n //计数器,配合counter_is()实现几个周期触发一次的定时器\n int64_t counter;\n\npublic:\n //构造析构函数\n nad_timer(int64 interval_ms);\n virtual ~nad_timer();\n\n //执行定时器,传入当前的时间(毫秒)\n virtual void handle() = 0;\n\n //判断计数器,比如counter_is(5)==true时表示每1秒调用一次(200ms*5=1s)\n bool counter_is(int64_t counter);\n};\n\n//定时器管理器的基类\nclass nad_timer_list\n{\n//protected:\npublic:\n //定时器列表\n vector<nad_timer *> timer_list;\n\npublic:\n //构造析构函数\n nad_timer_list();\n virtual ~nad_timer_list();\n\n //新增定时器\n int add_timer(nad_timer *timer);\n\n //删除定时器\n int delete_timer(nad_timer *timer);\n\n //执行定时器,传入当前的时间(毫秒)\n void handle_timer();\n};\n\n//全局定时器数组\nextern nad_timer_list g_ltimer;\n\n\n#endif\n"
},
{
"alpha_fraction": 0.5503355860710144,
"alphanum_fraction": 0.5503355860710144,
"avg_line_length": 21.923076629638672,
"blob_id": "7bccae1d8c12e420d2e3ff39dfb34251c65c5e09",
"content_id": "422b902978057f404ce64931691eeab1768a66ca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 298,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 13,
"path": "/athena/core/x86/Camera/lane_detect/include/Matrix/LeastSquares.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"Matrix.h\"\n#include <math.h>\nnamespace ls{\n class LeastSquares{\n private:\n Matrix mCoffe;\n int mPow;\n\n public:\n double getY(double x);\n void setPoints(const std::vector<double>& x, const std::vector<double>& y, int pow);\n };\n}\n"
},
{
"alpha_fraction": 0.4060705602169037,
"alphanum_fraction": 0.42165708541870117,
"avg_line_length": 16.623188018798828,
"blob_id": "bcdfcd05e88ceaf2a44df59491afb460910b492d",
"content_id": "7784ced957b03b26dcede2bc66d60433b71b4f24",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1371,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 69,
"path": "/athena/core/x86/Planning/include/common/car_state.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include <string>\n\n/**\n * @class CarPose\n * @brief 车辆位姿。\n */\nclass CarPose\n{\npublic:\n /**\n * @brief 构造函数\n */\n CarPose()\n {\n CurrentX_ = 0.0;\n CurrentY_ = 0.0;\n CurrentZ_ = 0.0;\n Current_heading_ = 0.0;\n Current_pitch = 0.0;\n Current_roll_ = 0.0;\n }\n /**\n * @brief 析构函数\n */\n virtual ~CarPose()\n {\n\n }\n\n double CurrentX_, CurrentY_, CurrentZ_; ///<车在空间坐标系下的坐标\n double Current_heading_, Current_pitch, Current_roll_; ///<车当姿态角\n\n};\n\n/**\n * @class CarState\n * @brief 车辆状态。\n */\nclass CarState\n{\npublic:\n /**\n * @brief 构造函数\n */\n CarState()\n {\n CurrentS_ = 0.0;\n car_speed_ = 0.0;\n steer_angle_ = 0.0;\n at_status_ = 0;\n }\n /**\n * @brief 析构函数\n */\n virtual ~CarState()\n {\n\n }\n\npublic:\n CarPose car_pose_; ///车辆位姿\n double CurrentS_; ///<车当前里程\n double car_speed_; ///<车速,从车身can或者惯导信息里获得\n double steer_angle_; ///<方向盘转角\n int at_status_; ///<档位信息\n double GPS_time_; ///<GPS时间\n\n};\n\n\n\n"
},
{
"alpha_fraction": 0.5988857746124268,
"alphanum_fraction": 0.6569836735725403,
"avg_line_length": 18.323076248168945,
"blob_id": "9426af23aab51d1b0e4255c0fd54e3a94084129e",
"content_id": "855e037a37fe109959dc3a19636511bdcd112793",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5138,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 260,
"path": "/athena/core/x86/Camera/vision_ssd_detect/kalman/kalmanfilter.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/***************By yuanjun**************************/\n#include \"kalmanfilter.h\"\n#include \"math.h\"\n#define delta_t 0.1\n\n\nKalmanFilter::KalmanFilter()\n{\n\n}\n\nKalmanFilter::KalmanFilter(camera_obj &single_obj,double rollAngle)\n{\n\tX1.SetMatrixRowAndCol(4,1);\n\tX2.SetMatrixRowAndCol(4,1);\n\tA.SetMatrixRowAndCol(4,4); //X(k)=A X(k-1)+W(k)\n\tH.SetMatrixRowAndCol(2,4); //Z(k)=H X(k)+V(k)\n\tZ.SetMatrixRowAndCol(2,1); //Z(k)是k时刻的测量值\n\tK.SetMatrixRowAndCol(4,2);\n\tQ.SetMatrixRowAndCol(4,4); //过程噪声\n\tR.SetMatrixRowAndCol(2,2); //测量噪声\n\tP1.SetMatrixRowAndCol(4,4);\n\tP2.SetMatrixRowAndCol(4,4);\n\tCoordinateRoll.SetMatrixRowAndCol(4,4);\n //by dhx\n\tX1.m_pTMatrix[0][0]=single_obj.lat_pos;\n\tX1.m_pTMatrix[1][0]=single_obj.lon_pos;\n\tX1.m_pTMatrix[2][0]=X1.m_pTMatrix[3][0]=25;\n\n\n\tA.Eye();//A的转置矩阵\n\tA.m_pTMatrix[0][2]=delta_t;\n\tA.m_pTMatrix[1][3]=delta_t;\n\n\tH.m_pTMatrix[0][0]=1;\n\tH.m_pTMatrix[1][1]=1;\n\n\tQ.Eye(); //Q的转置矩阵\n\n\tR.Eye(); //R的转置矩阵\n\n\tP1.Eye(); //P1的转置矩阵\n\n\tangle=rollAngle;\n\n\tCalCoorRoll(angle);\n}\n\nKalmanFilter::~KalmanFilter(void)\n{\n\n}\n\nvoid KalmanFilter::InitialKalmanFilter(camera_obj &single_obj)\n{\n\tX1.SetMatrixRowAndCol(4,1);\n\tX2.SetMatrixRowAndCol(4,1);\n\tA.SetMatrixRowAndCol(4,4);\n\tH.SetMatrixRowAndCol(2,4);\n\tZ.SetMatrixRowAndCol(2,1);\n\tK.SetMatrixRowAndCol(4,2);\n\tQ.SetMatrixRowAndCol(4,4);\n\tR.SetMatrixRowAndCol(2,2);\n\tP1.SetMatrixRowAndCol(4,4);\n\tP2.SetMatrixRowAndCol(4,4);\n\n\tX1.m_pTMatrix[0][0]=single_obj.lat_pos;\n\tX1.m_pTMatrix[1][0]=single_obj.lon_pos;\n\tX1.m_pTMatrix[2][0]=X1.m_pTMatrix[3][0]=1;\n\n\tA.Eye();\n\tA.m_pTMatrix[0][2]=delta_t;\n\tA.m_pTMatrix[1][3]=delta_t;\n\n\tH.m_pTMatrix[0][0]=1;\n\tH.m_pTMatrix[1][1]=1;\n\n\tQ.Eye();\n\n\tR.Eye();\n\n\tP1.Eye();\n\n}\n\nvoid KalmanFilter::timeUpdate()\n{\n\tCMatrix T1,T2;\n\tX2 = A * X1;\n\tT1 = A.Transpose();\n\tP2 = A * P1 * T1 + Q;\n\n//\tX2=A*X1;\n//\tP2=A*P1*A.Transpose()+Q;\n}\n\nvoid 
KalmanFilter::stateUpdate(camera_obj &single_obj,double rollangle,double *pos)\n{\n CMatrix T1,T2;\n\t//Z.m_pTMatrix[0][0]=glu.avg_x;\n\t//Z.m_pTMatrix[1][0]=glu.avg_z;\n\n\t//CMatrix temp(2,2);\n\t// T1= H.Transpose();\n\t//temp=H*P2*T1+R;\n\t// T2= temp.Inverse();\n\t//K=P2*T1*T2;\n\n\t//CalCoorRoll(rollangle);\n\n\t//X2=CoordinateRoll*X2;\n\n\t// CMatrix T3,T4,T5;\n\n\t// T4=H*X2;\n\t// T3=Z-T4;\n\t// T5=K*T3;\n\t//X1=X2+T5;\n\n\t//CMatrix I(4,4);\n\t//I.Eye();\n\n\t// CMatrix T6,T7,T8;\n\t// T6=K*H;\n\t// T7=I-T6;\n\t//P1=T7*P2;\n\n\tZ.m_pTMatrix[0][0]=single_obj.lat_pos;\n\tZ.m_pTMatrix[1][0]=single_obj.lon_pos;\n\n//\tCMatrix temp(2,2);\n//\ttemp=H*P2*H.Transpose()+R;\n\n CMatrix temp(2,2);\n T1= H.Transpose();\n temp=H*P2*T1+R;\n\n T2= temp.Inverse();\n K=P2*T1*T2;\n\n//\tK=P2*H.Transpose()*temp.Inverse();\n\n\tCalCoorRoll(rollangle);\n\n\tX2=CoordinateRoll*X2;\n\n\tX2.m_pTMatrix[0][0]+=pos[0];\n\tX2.m_pTMatrix[1][0]+=pos[2];\n\n\n//\tX1=X2+K*(Z-H*X2);\n\n CMatrix T3,T4,T5;\n\n T4=H*X2;\n T3=Z-T4;\n T5=K*T3;\n X1=X2+T5;\n\n\tCMatrix I(4,4);\n\tI.Eye();\n\n//\tP1=(I-K*H)*P2;\n\n CMatrix T6,T7,T8;\n T6=K*H;\n T7=I-T6;\n P1=T7*P2;\n\n}\n\nObjectState KalmanFilter::GetCurrentState() //得到当前状态量\n{\n\tObjectState movestate;\n\tmovestate.x_position=X1.m_pTMatrix[0][0];\n\tmovestate.z_positon=X1.m_pTMatrix[1][0];\n\tmovestate.x_speed=X1.m_pTMatrix[2][0];\n\tmovestate.z_speed=X1.m_pTMatrix[3][0];\n\n\treturn movestate;\n}\n\nObjectState KalmanFilter::GetPredictState() //得到预测状态\n{\n\tObjectState movestate;\n\n\tmovestate.x_position=X2.m_pTMatrix[0][0];\n\tmovestate.z_positon=X2.m_pTMatrix[1][0];\n\tmovestate.x_speed=X2.m_pTMatrix[2][0];\n\tmovestate.z_speed=X2.m_pTMatrix[3][0];\n\n\treturn movestate;\n}\n\nMeasurement KalmanFilter::GetPredictMeasurement(double rollAngle,double *pos) //预测测量\n{\n\tMeasurement predictMeasurement;\n\tCMatrix 
temp(2,1);\n\tCalCoorRoll(rollAngle);\n\tX2=CoordinateRoll*X2;\n\tX2.m_pTMatrix[0][0]+=pos[0];\n\tX2.m_pTMatrix[1][0]+=pos[2];\n\ttemp=H*X2;\n\tpredictMeasurement.x_measurement=temp.m_pTMatrix[0][0];\n\tpredictMeasurement.z_measurement=temp.m_pTMatrix[1][0];\n\n\treturn predictMeasurement;\n}\n\nCMatrix KalmanFilter::CalMeasureDeviation() //测量偏差\n{\n\tCMatrix Deviation(2,2);\n\tCMatrix standardDeviation(2,2);\n\n\tCMatrix T6,T7,T8;\n\tT6=H.Transpose();\n\tT7=P2*T6;\n\tDeviation=H*T7+R;\n\n//\tDeviation=H*P2*H.Transpose()+R;\n\n\tfor (int i=0;i<2;i++)\n\t{\n\t\tfor(int j=0;j<2;j++)\n\t\t{\n\t\t\tstandardDeviation.m_pTMatrix[i][j]=sqrt(Deviation.m_pTMatrix[i][j]);\n\t\t}\n\t}\n\n\treturn standardDeviation;\n}\n\nKalmanFilter& KalmanFilter::operator = (const KalmanFilter& anotherKF)\n{\n\tA=anotherKF.A;\n\tH=anotherKF.H;\n\tX1=anotherKF.X1;\n\tX2=anotherKF.X2;\n\tK=anotherKF.K;\n\tZ=anotherKF.Z;\n\tP1=anotherKF.P1;\n\tP2=anotherKF.P2;\n\tQ=anotherKF.Q;\n\tR=anotherKF.R;\n\tCoordinateRoll=anotherKF.CoordinateRoll;\n\tangle=anotherKF.angle;\n\treturn *this;\n}\n\nvoid KalmanFilter::CalCoorRoll(double angle)\n{\n\tCoordinateRoll.m_pTMatrix[0][0]=cos(angle);//angle is radian??\n\tCoordinateRoll.m_pTMatrix[0][1]=sin(angle);\n\tCoordinateRoll.m_pTMatrix[1][0]=-sin(angle);\n\tCoordinateRoll.m_pTMatrix[1][1]=cos(angle);\n\tCoordinateRoll.m_pTMatrix[2][2]=cos(angle);\n\tCoordinateRoll.m_pTMatrix[2][3]=sin(angle);\n\tCoordinateRoll.m_pTMatrix[3][2]=-sin(angle);\n\tCoordinateRoll.m_pTMatrix[3][3]=cos(angle);\n}\n\n\n"
},
{
"alpha_fraction": 0.6527777910232544,
"alphanum_fraction": 0.6805555820465088,
"avg_line_length": 16.75,
"blob_id": "22976bad2459dab3bae06dec0ac80761cd9a0eb4",
"content_id": "586438e5248ac6b1827d6839653b950efc7a3a49",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 72,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 4,
"path": "/athena/core/x86/Control/include/common/cputime.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <sys/time.h>\n#include <stdint.h>\n\nint64_t get_current_time();\n\n"
},
{
"alpha_fraction": 0.5579937100410461,
"alphanum_fraction": 0.5726227760314941,
"avg_line_length": 16.399999618530273,
"blob_id": "9eaf77c8f382a960968522a18a335e8deab5f2d0",
"content_id": "ebb75914db3cfc0e1e558490568ba90db84b2981",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1043,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 55,
"path": "/athena/examples/LCM/Singlecar/control/common/logging.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file logging.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n#ifndef COMMON_LOGGING_H_\n#define COMMON_LOGGING_H_\n\n#include <iostream>\n\nusing namespace std;\n\n/**\n* @namespace athena::control\n* @brief athena::control\n*/\nnamespace athena{\nnamespace control{\n/**\n * @class Logging\n * @brief 日志类.\n */\nclass Logging{\npublic:\n Logging() = default;\n ~Logging() = default;\n typedef enum{\n INFO = 1, /**< 消息*/\n WARNING = 2, /**< 警告*/\n ERROR = 3, /**< 错误*/\n }LogLevel;\n/**\n * @brief 初始化.\n * @param[in] log_enable true=日志记录 false=不记录.\n * @return void.\n */\n static void Init(bool log_enable);\n\n/**\n * @brief 日志记录.\n * @param[in] level 日志等级参考LogLevel.\n * @param[in] info 日志记录信息.\n * @return void.\n */\n static void LogInfo(int level,std::string info);\nprivate:\n ///是否进行日志记录\n static bool log_enable_;\n};\n}\n}\n#endif// COMMON_LOGGING_H_\n"
},
{
"alpha_fraction": 0.5526925325393677,
"alphanum_fraction": 0.5657209157943726,
"avg_line_length": 24.02898597717285,
"blob_id": "b9210fc17c32126dd86d767d5369e77b03304845",
"content_id": "d45ba608716abbe9ad08aa630d0c235b1981bdeb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4142,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 138,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/tmc_stereobmp-forMono.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "SHIFT_JIS",
"text": "#ifndef _TMC_STEREOBMP_H_\n#define _TMC_STEREOBMP_H_\nclass Disparity\n{\n\n};\n\n#include <stdlib.h>\n#include <time.h>\n#include <math.h>\n\ntypedef struct\n{\n int\t\t\tnum_of_cam;\t\t\t\t\t//カメラの台数(単眼:1 ステレオ:2)← このパラメータがない場合は1とする)\n double\t\tbase_length;\t\t\t\t//カメラ基線長\n int\t\t\twidth;\t\t\t\t\t\t//入力画像幅\n int\t\t\theight;\t\t\t\t\t\t//入力画像高さ\n\n double\t\tcam_pos_x;\t\t\t\t\t//カメラ取り付け位置X[mm]\n double\t\tcam_pos_z;\t\t\t\t\t//カメラ取り付け位置Z[mm]\n double\t\tcam_pos_y;\t\t\t\t\t//カメラ取り付け位置Y[mm]\n double\t\troll;\t\t\t\t\t\t//カメラ取り付け角ロール[deg]\n double\t\tpitch;\t\t\t\t\t\t//カメラ取り付け角ピッチ[deg]\n double\t\tyaw;\t\t\t\t\t\t//カメラ取り付け角ヨー[deg]\n\n // *************************************************************************** //\n double\t\tpix_f_x;\t\t\t\t\t//焦点距離(pixel) X方向\n double\t\tpix_f_y;\t\t\t\t\t//焦点距離(pixel) Y方向\n double\t\ti_x0;\t\t\t\t\t\t//光学画像中心(X)\n double\t\ti_y0;\t\t\t\t\t\t//光学画像中心(Y)\n double\t\tlens_dist_rad_k1;\t\t\t//半径方向のレンズ歪み係数K1\n double\t\tlens_dist_rad_k2;\t\t\t//半径方向のレンズ歪み係数K2\n double\t\tlens_dist_tan_p1;\t\t\t//円周方向のレンズ歪み係数P1\n double\t\tlens_dist_tan_p2;\t\t\t//円周方向のレンズ歪み係数P2\n double\t\tundist_pix_f_x;\t\t\t\t//歪補正用焦点距離(pixel)X方向\n double\t\tundist_pix_f_y;\t\t\t\t//歪補正用焦点距離(pixel)Y方向\n\n double\t\tr_r[3][3];\t\t\t\t\t//平行化行列 [行][列]\n\n double\t\tfoe_x;\t\t\t\t\t\t//Z消失点のX座標[pixel]\n double\t\tfoe_y;\t\t\t\t\t\t//Z消失点のY座標[pixel]\n double\t\tratio;\t\t\t\t\t\t//縦横比(縦/横)\n double\t\tcam_deg_v;\t\t\t\t\t//カメラ垂直画角[deg/pix]\n double\t\tres_ang_v;\t\t\t\t\t//垂直画角解像度\n\n double\t\tl_pix_f_x;\t\t\t\t\t//焦点距離(pixel) X方向\n double\t\tl_pix_f_y;\t\t\t\t\t//焦点距離(pixel) Y方向\n double\t\tl_i_x0;\t\t\t\t\t\t//光学画像中心(X)\n double\t\tl_i_y0;\t\t\t\t\t\t//光学画像中心(Y)\n double\t\tl_lens_dist_rad_k1;\t\t\t//半径方向のレンズ歪み係数K1\n double\t\tl_lens_dist_rad_k2;\t\t\t//半径方向のレンズ歪み係数K2\n double\t\tl_lens_dist_tan_p1;\t\t\t//円周方向のレンズ歪み係数P1\n double\t\tl_lens_dist_tan_p2;\t\t\t//円周方向のレンズ歪み係数P2\n double\t\tl_undist_pix_f_x;\t\t\t//歪補正用焦点距離(pixel)X方向\n 
double\t\tl_undist_pix_f_y;\t\t\t//歪補正用焦点距離(pixel)Y方向\n\n double\t\tr_l[3][3];\t\t\t\t\t//平行化行列 [行][列]\n\n} IPDATA_PARAM_CAM,*LPIPDATA_PARAM_CAM;\n\n\n#define\tCAM_FILE\t\"camera.ini\"\n\nclass PARAM_CAM\n{\nprivate:\n LPIPDATA_PARAM_CAM _pParamCam;\n LPIPDATA_PARAM_CAM _pParamCamDefault;\n LPIPDATA_PARAM_CAM _pParamCamNearArea;\n double _dCurvatureVertical;\n int _iProcArea[2][2];\t// [I or J][Min or Max]\n\n\npublic:\n PARAM_CAM();\n ~PARAM_CAM();\n int load(char *fname);\n\n inline LPIPDATA_PARAM_CAM\tParamCam(void)\n {\n return _pParamCam;\n }\n inline LPIPDATA_PARAM_CAM\tParamCamDefault(void)\n {\n return _pParamCamDefault;\n }\n inline LPIPDATA_PARAM_CAM\tParamCamNearArea(void)\n {\n return _pParamCamNearArea;\n }\n inline double getCurvatureVertical(void)\n {\n return _dCurvatureVertical;\n }\n inline void setCurvatureVertical(double dV)\n {\n _dCurvatureVertical = dV;\n }\n inline int IsrcMin(void)\n {\n return _iProcArea[0][0];\n }\n inline int IsrcMax(void)\n {\n return _iProcArea[0][1];\n }\n inline int JsrcMin(void)\n {\n return _iProcArea[1][0];\n }\n inline int JsrcMax(void)\n {\n return _iProcArea[1][1];\n }\n inline void IsrcMin(int iV)\n {\n _iProcArea[0][0] = iV;\n }\n inline void IsrcMax(int iV)\n {\n _iProcArea[0][1] = iV;\n }\n inline void JsrcMin(int iV)\n {\n _iProcArea[1][0] = iV;\n }\n inline void JsrcMax(int iV)\n {\n _iProcArea[1][1] = iV;\n }\n inline void saveParamCamNearArea(void)\n {\n (*(ParamCamNearArea())) = *ParamCam();\n }\n\n};\n\n#endif _TMC_STEREOBMP_H_\n"
},
{
"alpha_fraction": 0.5196908116340637,
"alphanum_fraction": 0.5272732377052307,
"avg_line_length": 31.21875,
"blob_id": "9a872c005726d76f5e75af0ba9e3d74aedad6b98",
"content_id": "50bd507b7dd1f405a55f044e6bdb35b7c585519d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 34672,
"license_type": "no_license",
"max_line_length": 153,
"num_lines": 1056,
"path": "/athena/examples/LCM/Singlecar/planning/planning_node.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"planning_node.h\"\n#include \"common/cs.h\"\n//#include \"planning/planning_param.h\"\n#include \"config.h\"\n\n/// constructor\nPlanningNode::PlanningNode()\n : planning_url_( OBU_URL )\n , is_route_set_( false )\n{\n init();\n\n}\n\n/// destructor\nPlanningNode::~PlanningNode()\n{\n\n}\n\n/// initializer\nvoid PlanningNode::init()\n{\n ///init lcm msg\n read_motion_plan_config_value_from_file();\n lcm_ins_ = new lcm::LCM( OBU_URL );\n lcm_can_ = new lcm::LCM( OBU_URL );\n lcm_route_ = new lcm::LCM( OBU_URL );\n lcm_map_ = new lcm::LCM( OBU_URL );\n lcm_obstacle_ = new lcm::LCM( OBU_URL );\n lcm_publish_ = new lcm::LCM( OBU_URL );\n\n ///thread\n std::thread thread_ins( &PlanningNode::ThreadFunction_ins, this );\n std::thread thread_can( &PlanningNode::ThreadFunction_can, this );\n std::thread thread_route( &PlanningNode::ThreadFunction_route, this );\n std::thread thread_map( &PlanningNode::ThreadFunction_map, this );\n std::thread thread_obstacle( &PlanningNode::ThreadFunction_obstacle, this );\n\n thread_ins.detach();\n thread_can.detach();\n thread_route.detach();\n thread_map.detach();\n thread_obstacle.detach();\n\n hand_direction_ = 0;\n last_hand_direction_ = 0;\n\n}\n\nvoid PlanningNode::run()\n{\n static bool is_col_last_d = false;\n static bool is_col_last_r = false;\n int cur_pos;\n double view_step = 2.5;\n\n while( 1 )\n {\n ///car state\n planning_.put_car_state( car_state_ );\n\n ///map route\n if ( is_route_set_ )\n {\n planning_.put_map_info( route_ );\n planning_.is_map_updated_ = true;\n is_route_set_ = false;\n }\n\n\n ///planning\n if ( !planning_.is_park_ )\n {\n planning_.park_decision();\n }\n// cout << \"planning_.is_park_ = \" << planning_.is_park_ << endl;\n\n if ( planning_.is_park_ )\n {\n if ( !planning_.is_park_r_published_ )\n {\n // path park_trajectory_d, park_trajectory_r;\n if ( !planning_.is_park_d_published_ )\n {\n ///vui提示\n publish_alarm( ALARM_ADVISE, \"开始泊车\" );\n cout << \"----- 开始泊车 -----\" << 
endl;\n usleep( 1000000 );\n\n planning_.generate_park_trajectory();\n\n ///下发D档部分\n planning_.put_virtual_path( planning_.park_trajectory_d_ );\n if ( planning_.get_out_trajectory() )\n {\n ///publish\n publish_trajectory( planning_.out_trajectory_ );\n\n publish_view_path( planning_.out_trajectory_, view_step );\n }\n planning_.is_park_d_published_ = true;\n }\n\n if ( !planning_.is_park_d_published_ )\n continue;\n\n cur_pos = planning_.park_trajectory_map_matching();\n int is_collided = planning_.park_trajectory_collision_check( cur_pos );\n if ( is_collided && !is_col_last_d )\n {\n is_col_last_d = true;\n ///停车\n planning_.set_trajectory_longitudinal( cur_pos+1, 0.0, AT_STATUS_P );\n\n publish_trajectory( planning_.out_trajectory_ );\n\n publish_view_path( planning_.out_trajectory_, view_step );\n\n }\n if ( is_col_last_d && !is_collided )\n {\n is_col_last_d = false;\n ///重新起步\n // planning_.set_trajectory_longitudinal( 0, 5.0/3.6, AT_STATUS_D );\n planning_.get_out_trajectory();\n\n publish_trajectory( planning_.out_trajectory_ );\n\n publish_view_path( planning_.out_trajectory_, view_step );\n\n }\n }\n\n\n // cout << \"-----------------------------------------------------------------------\" << endl;\n // cout << \"delta = \" << fabs( planning_.intelligentparking_.get_size_trajectory_d() - cur_pos ) << endl;\n // cout << \"cur_pos = \" << cur_pos << endl;\n // cout << \"planning_.car_state_.car_speed_ = \" << planning_.car_state_.car_speed_ << endl;\n // cout << \"planning_.is_park_r_published_ = \" << planning_.is_park_r_published_ << endl;\n ///判断到达开始倒车的点\n if ( fabs( planning_.intelligentparking_.get_size_trajectory_d() - cur_pos ) < 15\n && planning_.car_state_.car_speed_ < THRESHOLD_CAR_STATIC_SPEED\n && !planning_.is_park_r_published_ )\n {\n ///vui提示\n publish_alarm( ALARM_ADVISE, \"开始倒车,请注意!\" );\n cout << \"----- 开始倒车,请注意! 
-----\" << endl;\n usleep( 1000000 );\n\n planning_.put_virtual_path( planning_.park_trajectory_r_ );\n if ( planning_.get_out_trajectory() )\n {\n ///publish\n publish_trajectory( planning_.out_trajectory_ );\n\n publish_view_path( planning_.out_trajectory_, view_step );\n }\n planning_.is_park_r_published_ = true;\n\n }\n\n if ( !planning_.is_park_r_published_ )\n continue;\n\n int cur_pos_r = planning_.park_trajectory_map_matching();\n int is_collided_r = planning_.park_trajectory_collision_check( cur_pos_r );\n // cout << \"-------- cur_pos_r = \" << cur_pos_r << \", ---- is_collided_r = \" << is_collided_r << endl;\n if ( is_collided_r && !is_col_last_r )\n {\n is_col_last_r = true;\n ///停车\n planning_.set_trajectory_longitudinal( cur_pos_r+1, 0.0, AT_STATUS_P );\n\n publish_trajectory( planning_.out_trajectory_ );\n\n publish_view_path( planning_.out_trajectory_, view_step );\n cout << \"!!!!!!!!!!!!!!!!!!!!!!!!!!! stop !!!!!!!!!!!!!!!!!!!!!!!!!!!!\" << endl;\n }\n if ( is_col_last_r && !is_collided_r )\n {\n is_col_last_r = false;\n ///重新起步\n //planning_.set_trajectory_longitudinal( 0, 5.0/3.6, AT_STATUS_R );\n planning_.get_out_trajectory();\n\n publish_trajectory( planning_.out_trajectory_ );\n\n publish_view_path( planning_.out_trajectory_, view_step );\n\n }\n\n if ( cur_pos_r >= planning_.intelligentparking_.get_size_trajectory_r()-15 )\n {\n planning_.intelligentparking_.init();\n }\n\n usleep( 50000 );\n\n }\n else\n {\n planning_.map_matching_thread();\n\n planning_.behaviour_decision_thread();\n\n if ( planning_.motion_plan_virtual_lane_thread() )\n {\n if ( planning_.get_out_trajectory() )\n {\n ///publish\n publish_trajectory( planning_.out_trajectory_ );\n\n path show_path;\n planning_.get_collision_check_path( show_path );\n publish_view_path( show_path, view_step );\n }\n }\n }\n\n usleep( 50000 );\n }\n\n\n\n}\n\nint PlanningNode::park_planning( athena::planning::Planning &planning, double view_step,\n bool is_col_last_d, bool is_col_last_r 
)\n{\n int cur_pos;\n\n if ( !planning.is_park_r_published_ )\n {\n// path park_trajectory_d, park_trajectory_r;\n if ( !planning.is_park_d_published_ )\n {\n ///vui提示\n publish_alarm( ALARM_ADVISE, \"开始泊车\" );\n cout << \"----- 开始泊车 -----\" << endl;\n usleep( 1000000 );\n\n planning.generate_park_trajectory();\n\n ///下发D档部分\n planning.put_virtual_path( planning.park_trajectory_d_ );\n if ( planning.get_out_trajectory() )\n {\n ///publish\n publish_trajectory( planning.out_trajectory_ );\n\n publish_view_path( planning.out_trajectory_, view_step );\n }\n planning.is_park_d_published_ = true;\n }\n\n if ( !planning.is_park_d_published_ )\n return 0;\n\n cur_pos = planning.park_trajectory_map_matching();\n int is_collided = planning.park_trajectory_collision_check( cur_pos );\n if ( is_collided && !is_col_last_d )\n {\n is_col_last_d = true;\n ///停车\n planning.set_trajectory_longitudinal( cur_pos+1, 0.0, AT_STATUS_P );\n\n publish_trajectory( planning.out_trajectory_ );\n\n publish_view_path( planning.out_trajectory_, view_step );\n\n }\n if ( is_col_last_d && !is_collided )\n {\n is_col_last_d = false;\n ///重新起步\n// planning.set_trajectory_longitudinal( 0, 5.0/3.6, AT_STATUS_D );\n planning.get_out_trajectory();\n\n publish_trajectory( planning.out_trajectory_ );\n\n publish_view_path( planning.out_trajectory_, view_step );\n\n }\n }\n\n\n// cout << \"-----------------------------------------------------------------------\" << endl;\n// cout << \"delta = \" << fabs( planning.intelligentparking_.get_size_trajectory_d() - cur_pos ) << endl;\n// cout << \"cur_pos = \" << cur_pos << endl;\n// cout << \"planning.car_state_.car_speed_ = \" << planning.car_state_.car_speed_ << endl;\n// cout << \"planning.is_park_r_published_ = \" << planning.is_park_r_published_ << endl;\n ///判断到达开始倒车的点\n if ( fabs( planning.intelligentparking_.get_size_trajectory_d() - cur_pos ) < 15\n && planning.car_state_.car_speed_ < THRESHOLD_CAR_STATIC_SPEED\n && !planning.is_park_r_published_ )\n 
{\n ///vui提示\n publish_alarm( ALARM_ADVISE, \"开始倒车,请注意!\" );\n cout << \"----- 开始倒车,请注意! -----\" << endl;\n usleep( 1000000 );\n\n planning.put_virtual_path( planning.park_trajectory_r_ );\n if ( planning.get_out_trajectory() )\n {\n ///publish\n publish_trajectory( planning.out_trajectory_ );\n\n publish_view_path( planning.out_trajectory_, view_step );\n }\n planning.is_park_r_published_ = true;\n\n }\n\n if ( !planning.is_park_r_published_ )\n return 0;\n\n int cur_pos_r = planning.park_trajectory_map_matching();\n int is_collided_r = planning.park_trajectory_collision_check( cur_pos_r );\n// cout << \"-------- cur_pos_r = \" << cur_pos_r << \", ---- is_collided_r = \" << is_collided_r << endl;\n if ( is_collided_r && !is_col_last_r )\n {\n is_col_last_r = true;\n ///停车\n planning.set_trajectory_longitudinal( cur_pos_r+1, 0.0, AT_STATUS_P );\n\n publish_trajectory( planning.out_trajectory_ );\n\n publish_view_path( planning.out_trajectory_, view_step );\n cout << \"!!!!!!!!!!!!!!!!!!!!!!!!!!! 
stop !!!!!!!!!!!!!!!!!!!!!!!!!!!!\" << endl;\n }\n if ( is_col_last_r && !is_collided_r )\n {\n is_col_last_r = false;\n ///重新起步\n //planning.set_trajectory_longitudinal( 0, 5.0/3.6, AT_STATUS_R );\n planning.get_out_trajectory();\n\n publish_trajectory( planning.out_trajectory_ );\n\n publish_view_path( planning.out_trajectory_, view_step );\n\n }\n\n if ( cur_pos_r >= planning.intelligentparking_.get_size_trajectory_r()-15 )\n {\n planning.intelligentparking_.init();\n }\n\n return 1;\n}\n\nint PlanningNode::publish_trajectory( OutTrajectory ot )\n{\n if ( !lcm_publish_->good() )\n {\n return 0;\n }\n\n obu_lcm::mt_info_report m_motion_planner_msg;\n m_motion_planner_msg.points.clear();\n m_motion_planner_msg.num_of_points = ot.num_of_points_;\n m_motion_planner_msg.type = ot.car_action_; ///<temp\n m_motion_planner_msg.gps_time = 0.0;\n m_motion_planner_msg.car_action = ot.car_action_;\n m_motion_planner_msg.driving_mode = ot.driving_mode_;\n cout << \"-------------- m_motion_planner_msg.driving_mode = \" << m_motion_planner_msg.driving_mode << endl;\n\n for( auto point_temp : ot.points_ )\n {\n obu_lcm::nav_points msg_p;\n\n msg_p.s = point_temp.s;\n msg_p.p_x = point_temp.position_x;\n msg_p.p_y = point_temp.position_y;\n msg_p.p_v = point_temp.speed_desired_Uxs;\n msg_p.p_a = point_temp.acceleration_desired_Axs;\n msg_p.p_h = point_temp.heading;\n msg_p.p_k = -point_temp.k_s;\n msg_p.p_g = point_temp.gears;\n\n m_motion_planner_msg.points.push_back( msg_p );\n }\n\n lcm_publish_->publish( \"mt_info_report\", &m_motion_planner_msg );\n cout << \" --- send mt_info_report msg : size \" << m_motion_planner_msg.num_of_points<< endl;\n\n return 1;\n}\n\nint PlanningNode::publish_view_path( OutTrajectory out_traj, double step )\n{\n if ( !lcm_publish_->good() )\n {\n return 0;\n }\n int num = out_traj.points_.size();\n if ( num <= 0 )\n {\n return 0;\n }\n\n obu_lcm::lateral_control_vui_info m_motion_planner_msg;\n m_motion_planner_msg.points.clear();\n// 
m_motion_planner_msg.num_of_points = num;\n m_motion_planner_msg.gps_time = 0.0;\n\n obu_lcm::nav_points msg_p;\n double delta_length = 0.0;\n int index = 0;\n\n point2msg( out_traj.points_[0], &msg_p );\n for( int i = 1; i < num; i++ )\n {\n delta_length += length_of_two_navipoint( out_traj.points_[i], out_traj.points_[i-1] );\n if ( delta_length >= step )\n {\n point2msg( out_traj.points_[i], &msg_p );\n m_motion_planner_msg.points.push_back( msg_p );\n delta_length = 0.0;\n }\n }\n m_motion_planner_msg.num_of_points = m_motion_planner_msg.points.size();\n\n lcm_publish_->publish( \"lateral_control_vui_info\", &m_motion_planner_msg );\n cout << \" --- send lateral_control_vui_info msg : size \" << m_motion_planner_msg.num_of_points<< endl;\n\n /*log*/\n if( TRAJECTORY_LOG_SWITCH )\n {\n ofstream outfile(\"lateral_control_vui_info.log\", std::ios::app);\n outfile.precision(8);\n\n outfile << \" num_of_points : \" << m_motion_planner_msg.num_of_points << \" \"\n << endl << endl;\n\n for(int i=0; i < m_motion_planner_msg.num_of_points; i++)\n {\n outfile << \" i \" << i\n << \" x \" << m_motion_planner_msg.points[i].p_x\n << \" y \" << m_motion_planner_msg.points[i].p_y\n << \" h \" << m_motion_planner_msg.points[i].p_h\n << \" v \" << m_motion_planner_msg.points[i].p_v\n << \" a \" << m_motion_planner_msg.points[i].p_a\n << \" k \" << m_motion_planner_msg.points[i].p_k\n << \" g \" << (int)m_motion_planner_msg.points[i].p_g\n << endl;\n }\n\n outfile << endl;\n outfile.close();\n }\n\n return 1;\n\n}\n\nint PlanningNode::publish_view_path( path out_path, double step )\n{\n if ( !lcm_publish_->good() )\n {\n return 0;\n }\n int num = out_path.ref_points.size();\n if ( num <= 0 )\n {\n return 0;\n }\n\n obu_lcm::lateral_control_vui_info m_motion_planner_msg;\n m_motion_planner_msg.points.clear();\n// m_motion_planner_msg.num_of_points = num;\n m_motion_planner_msg.gps_time = 0.0;\n\n obu_lcm::nav_points msg_p;\n double delta_length = 0.0;\n int index = 0;\n\n 
point2msg( out_path.ref_points[0], &msg_p );\n for( int i = 1; i < num; i++ )\n {\n delta_length += length_of_two_navipoint( out_path.ref_points[i], out_path.ref_points[i-1] );\n if ( delta_length >= step )\n {\n point2msg( out_path.ref_points[i], &msg_p );\n m_motion_planner_msg.points.push_back( msg_p );\n delta_length = 0.0;\n }\n }\n m_motion_planner_msg.num_of_points = m_motion_planner_msg.points.size();\n\n lcm_publish_->publish( \"lateral_control_vui_info\", &m_motion_planner_msg );\n cout << \" --- send lateral_control_vui_info msg : size \" << m_motion_planner_msg.num_of_points<< endl;\n\n /*log*/\n if( TRAJECTORY_LOG_SWITCH )\n {\n ofstream outfile(\"lateral_control_vui_info.log\", std::ios::app);\n outfile.precision(8);\n\n outfile << \" num_of_points : \" << m_motion_planner_msg.num_of_points << \" \"\n << endl << endl;\n\n for(int i=0; i < m_motion_planner_msg.num_of_points; i++)\n {\n outfile << \" i \" << i\n << \" x \" << m_motion_planner_msg.points[i].p_x\n << \" y \" << m_motion_planner_msg.points[i].p_y\n << \" h \" << m_motion_planner_msg.points[i].p_h\n << \" v \" << m_motion_planner_msg.points[i].p_v\n << \" a \" << m_motion_planner_msg.points[i].p_a\n << \" k \" << m_motion_planner_msg.points[i].p_k\n << \" g \" << (int)m_motion_planner_msg.points[i].p_g\n << endl;\n }\n\n outfile << endl;\n outfile.close();\n }\n\n return 1;\n\n}\n\nint PlanningNode::publish_alarm( int32_t alarm_level, string alarm )\n{\n if ( !lcm_publish_->good() )\n {\n return 0;\n }\n\n nad_lcm::ou_alarm_report msg_alarm;\n\n msg_alarm.alarm_proc = \"planning\";\n msg_alarm.alarm_time = 0.0;\n msg_alarm.alarm_level = alarm_level;\n msg_alarm.alarm_type = ALARM_TYPE_OCCUR;\n msg_alarm.alarm_no = 0;\n msg_alarm.alarm_show = ALARM_SHOW_TEXT_SOUND;\n msg_alarm.alarm = alarm;\n\n lcm_publish_->publish( \"ou_alarm_report\", &msg_alarm );\n\n return 1;\n}\n\n/*************** Functions of All Thread *****************************\n *\n * ThreadFunction_ins\n * 
ThreadFunction_can\n * ThreadFunction_route\n * ThreadFunction_map\n * ThreadFunction_obstacle\n *\n *********************************************************************/\nvoid* PlanningNode::ThreadFunction_ins()\n{\n if ( !lcm_ins_->good() )\n return NULL;\n\n lcm_ins_->subscribe( \"ins_info\", &PlanningNode::handle_ins_Message, this );\n\n while( 0 == lcm_ins_->handle() );\n return NULL;\n}\n\nvoid* PlanningNode::ThreadFunction_can()\n{\n if ( !lcm_can_->good() )\n return NULL;\n\n// lcm_can_->subscribe( \"CAN_value\", &PlanningNode::handle_can_value_Message, this );\n// lcm_can_->subscribe( \"CAN_status\", &PlanningNode::handle_can_status_Message, this );\n// lcm_can_->subscribe( \"chassis_detail_info\", &PlanningNode::handle_chassis_detail_Message, this );\n// lcm_can_->subscribe( \"steering_feedback_info\", &PlanningNode::handle_steering_Message, this );\n lcm_can_->subscribe( \"vehicle_info\", &PlanningNode::handle_vehicle_info_Message, this );\n\n while( 0 == lcm_can_->handle() );\n return NULL;\n}\n\nvoid* PlanningNode::ThreadFunction_route()\n{\n if ( !lcm_route_->good() )\n return NULL;\n\n lcm_route_->subscribe( \"om_route_respond\", &PlanningNode::handle_route_info, this );\n\n while( 0 == lcm_route_->handle() );\n return NULL;\n}\n\nvoid* PlanningNode::ThreadFunction_map()\n{\n if ( !lcm_map_->good() )\n return NULL;\n\n lcm_map_->subscribe( \"om_traffic_lights_report\", &PlanningNode::handle_traffic_lights_info, this );\n lcm_map_->subscribe( \"back_coordinate_XYH\", &PlanningNode::handle_back_coordinate_XYH, this );\n lcm_map_->subscribe( \"ou_start_auto_respond\", &PlanningNode::handle_start_respond, this );\n lcm_map_->subscribe( \"ou_stop_auto_respond\", &PlanningNode::handle_stop_respond, this );\n\n while( 0 == lcm_map_->handle() );\n return NULL;\n}\n\nvoid* PlanningNode::ThreadFunction_obstacle()\n{\n if ( !lcm_obstacle_->good() )\n return NULL;\n\n lcm_obstacle_->subscribe( \"sensor_obstacle_report\", &PlanningNode::handle_obstacle_Message, 
this );\n\n while( 0 == lcm_obstacle_->handle() );\n return NULL;\n}\n\n\n/*************** Functions of All LCM Handle **************************\n *\n * handle_ins_Message\n * handle_can_value_Message\n * handle_can_status_Message\n * handle_vehicle_info_Message\n * handle_chassis_detail_Message\n * handle_steering_Message\n * handle_route_info\n * handle_traffic_lights_info\n * handle_back_coordinate_XYH\n * handle_obstacle_Message\n *\n **********************************************************************/\n// 惯导知道位置变了,和虚拟车道匹配之后来确定位置,并输出虚拟车道给控制部分。\nvoid PlanningNode::handle_ins_Message( const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::ins_info* msg)\n{\n get_current_ins_info( msg );\n\n usleep(100);\n}\n\n\nvoid PlanningNode::handle_can_value_Message( const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::CAN_value* msg)\n{\n if ( SOURCE_OF_CAR_SPEED ) ///1:从can信号获得本车当前速度\n car_state_.car_speed_ = msg->car_speed;\n// cout << \"------------- car_speed_ = \" << car_state_.car_speed_ << endl;\n\n usleep(100);\n}\n\nvoid PlanningNode::handle_can_status_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::CAN_status* msg)\n{\n car_state_.at_status_ = msg->at_status;\n\n hand_direction_ = msg->right_light_status - msg->left_light_status;\n\n if ( hand_direction_ != last_hand_direction_ )\n {\n planning_.decide_hand_expected_lane( hand_direction_ );\n\n last_hand_direction_ = hand_direction_;\n }\n\n\n usleep(100);\n}\n\nvoid PlanningNode::handle_vehicle_info_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::vehicle_info* msg)\n{\n if ( SOURCE_OF_CAR_SPEED ) ///1:从can信号获得本车当前速度\n {\n car_state_.car_speed_ = msg->vehicle_speed;\n }\n car_state_.at_status_ = msg->at_status;\n car_state_.steer_angle_ = msg->steer_angle;\n\n hand_direction_ = msg->right_turn_status - msg->left_turn_status;\n\n if ( hand_direction_ != last_hand_direction_ )\n {\n 
planning_.decide_hand_expected_lane( hand_direction_ );\n\n last_hand_direction_ = hand_direction_;\n }\n\n\n usleep(100);\n}\n\nvoid PlanningNode::handle_chassis_detail_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::ChassisDetail* msg)\n{\n car_state_.car_speed_ = msg->car_speed;\n car_state_.at_status_ = msg->gear_level;\n\n hand_direction_ = msg->right_turn_status - msg->left_turn_status;\n\n if ( hand_direction_ != last_hand_direction_ )\n {\n planning_.decide_hand_expected_lane( hand_direction_ );\n\n last_hand_direction_ = hand_direction_;\n }\n\n\n// left_turn = msg->left_turn_status;\n// right_turn = msg->right_turn_status;\n//\n// if ( left_turn == 1 && right_turn == 0 )\n// {\n// planning_.decide_hand_expected_lane( -1 );\n// }\n//\n// if ( left_turn == 0 && right_turn == 1 )\n// {\n// planning_.decide_hand_expected_lane( 1 );\n// }\n//\n// if ( left_turn == 0 && right_turn == 0 )\n// {\n// planning_.decide_hand_expected_lane( 0 );\n// }\n\n usleep(100);\n}\n\n//void PlanningNode::handle_steering_Message( const lcm::ReceiveBuffer* rbuf,\n// const std::string& chan,\n// const obu_lcm::steering_feedback_info* msg)\n//{\n// // 从底层CAN 总线获取方向盘转角信息。 是执行器这边发过来的。\n// car_state_.steer_angle_ = msg->steering_angle;\n//\n// usleep(100);\n//}\n\nvoid PlanningNode::handle_route_info( const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::om_route_respond* msg)\n{\n cout << \" +++++ +++++ +++++ +++++ +++++ +++++ +++++ +++++ +++++ +++++ +++++ +++++ +++++ \" << endl\n << \" msg->replan_flag \" << msg->replan_flag << endl\n << \" msg->route.num_of_section \"<< msg->route.num_of_section << endl;\n\n if(msg->route.num_of_section < 16 )\n return;\n\n if( msg->replan_flag )\n {\n route_.replan_flag = REPLAN_FLAG_NAVI;\n }\n else\n {\n route_.replan_flag = REPLAN_FLAG_NONE;\n }\n\n route_.route_clear();\n if ( get_map_info( msg ) )\n {\n cout << \"+++++ get map information successfully ! 
\" << endl;\n /**input**/\n// planning_.put_map_info( route_ );\n }\n\n is_route_set_ = true;\n\n\tusleep(1000);\n}\n\nvoid PlanningNode::handle_traffic_lights_info( const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::om_traffic_lights_report* msg)\n{\n cout << \"------[ traffic lights ] \" << msg->light_status << endl;\n cout << \"------[ crossing status ] \" << (int)msg->crossing_status << endl;\n\n traffic_lights_.crossing_status_ = msg->crossing_status;\n traffic_lights_.light_status_ = msg->light_status;\n\n /**input**/\n planning_.put_traffic_lights( traffic_lights_ );\n\n usleep(100);\n}\n\nvoid PlanningNode::handle_back_coordinate_XYH( const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::back_coordinate_XYH* msg)\n{\n#if 1\n cout<<\"===[STOP]=== x:\"<<msg->x<<\", y:\"<<msg->y<< \", h:\"<<msg->heading<<\", type:\"<<msg->type<<\", length:\"<<msg->length<<\", width:\"<<msg->width<<endl;\n#endif // debug\n\n stop_park_.x_ = msg->x;\n stop_park_.y_ = msg->y;\n stop_park_.heading_ = msg->heading;\n stop_park_.length_ = msg->length;//停车位长度\n stop_park_.width_ = msg->width;//停车位宽度\n stop_park_.type_ = msg->type; //0终点停车,否则泊车\n\n /**input**/\n planning_.put_stop_park( stop_park_, stop_park_.type_ );\n\n usleep(100);\n}\n\nvoid PlanningNode::handle_start_respond(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::ou_start_auto_respond* msg)\n{\n cout << \">>>>>>>>>> ou_start_auto_respond \" << endl;\n\n planning_.set_start_auto( true );\n// planning_.can_stop_auto_ = 0;\n\n //发起自动驾驶的时候,进行一次完整的重规划。\n planning_.route_.replan_flag = REPLAN_FLAG_NAVI;\n\n usleep(100);\n}\n\nvoid PlanningNode::handle_stop_respond(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::ou_stop_auto_respond* msg)\n{\n cout << \">>>>>>>>>> ou_stop_auto_respond \" << endl;\n\n planning_.set_start_auto( false );\n// planning_.can_stop_auto_ = 1;\n\n //发起自动驾驶的时候,进行一次完整的重规划。\n planning_.route_.replan_flag 
= REPLAN_FLAG_NAVI;\n\n usleep(100);\n}\n\nvoid PlanningNode::handle_obstacle_Message( const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::sensor_obstacle_report* msg_obstacle_list)\n{\n if ( get_obstacles_raw( msg_obstacle_list ) )\n {\n /**input**/\n planning_.put_obstacles_info( sensor_obstacles_ );\n// cout << \"22222222222 sensor_obstacles_ = \" << sensor_obstacles_.num_of_obstacle_ << endl;\n }\n\n usleep(100);\n}\n\n/*********************************************************************************************************************/\n///get ins info\nint PlanningNode::get_current_ins_info( const obu_lcm::ins_info * msg )\n{\n car_state_.GPS_time_ = msg->gps_time;\n\n if ( !SOURCE_OF_CAR_SPEED ) ///0:从惯导获得本车当前速度\n {\n car_state_.car_speed_ = sqrt( pow( msg->lateral_speed, 2) + pow( msg->longitudinal_speed, 2) );\n }\n\n// cout << \"----++++++------ yaw : \" << msg->heading << endl;\n\n car_state_.car_pose_.Current_roll_ = msg->roll;\n car_state_.car_pose_.Current_pitch = msg->pitch;\n car_state_.car_pose_.Current_heading_ = msg->heading;\n\n //temp\n xy_point wp;\n coord_transfer transfer;\n wp = transfer.cs.ll2xy( msg->lat, msg->lon );\n\n car_state_.car_pose_.CurrentX_ = std::get<X_CS>(wp);\n car_state_.car_pose_.CurrentY_ = std::get<Y_CS>(wp);\n car_state_.car_pose_.CurrentZ_ = msg->height;\n\n return 1;\n\n}\n\nint PlanningNode::get_map_info( const nad_lcm::om_route_respond* msg )\n{\n //using std::get;\n route_.cur_section = msg->route.cur_section;\n route_.mileage_next = msg->route.mileage_next;\n route_.mileage_pass = msg->route.mileage_pass;\n route_.mileage_start = msg->route.mileage_start;\n route_.mileage_stop = msg->route.mileage_stop;\n route_.time_stamp = msg->route.time_stamp;\n\n point_xys temp_point_xys;\n\n //left_edge\n route_.left_edge.line.clear();\n route_.left_edge.num_of_points = msg->route.left_edge.num_of_point;\n //route.left_edge.line.assign( msg->route.left_edge.line.begin(), 
msg->route.left_edge.line.end() );\n for ( int i = 0; i < msg->route.left_edge.num_of_point; i++ )\n {\n temp_point_xys.x = msg->route.left_edge.line[ i ].x;\n temp_point_xys.y = msg->route.left_edge.line[ i ].y;\n\n route_.left_edge.line.push_back( temp_point_xys );\n }\n\n //left line\n route_.left_line.line.clear();\n route_.left_line.num_of_points = msg->route.left_line.num_of_point;\n//zp20171013:使用车道线进行赋值判断,不使用边界线赋值判断\n for ( int i = 0; i < msg->route.left_line.num_of_point; i++ )\n {\n temp_point_xys.x = msg->route.left_line.line[ i ].x;\n temp_point_xys.y = msg->route.left_line.line[ i ].y;\n\n route_.left_line.line.push_back( temp_point_xys );\n }\n\n //right_edge\n route_.right_edge.line.clear();\n route_.right_edge.num_of_points = msg->route.right_edge.num_of_point;\n for ( int i = 0; i < msg->route.right_edge.num_of_point; i++ )\n {\n temp_point_xys.x = msg->route.right_edge.line[ i ].x;\n temp_point_xys.y = msg->route.right_edge.line[ i ].y;\n\n route_.right_edge.line.push_back( temp_point_xys );\n }\n\n //right_line\n route_.right_line.line.clear();\n route_.right_line.num_of_points = msg->route.right_line.num_of_point;\n//zp20171013:使用车道线进行赋值判断,不使用边界线赋值判断\n for ( int i = 0; i < msg->route.right_line.num_of_point; i++ )\n {\n temp_point_xys.x = msg->route.right_line.line[ i ].x;\n temp_point_xys.y = msg->route.right_line.line[ i ].y;\n\n route_.right_line.line.push_back( temp_point_xys );\n }\n\n //sections\n point_m temp_point_m;\n section_m temp_section_m;\n\n route_.num_of_section = msg->route.num_of_section;\n route_.line.clear();\n //section\n for ( int j = 0; j < msg->route.num_of_section; j++ )\n {\n temp_section_m.num_of_lane = msg->route.line[ j ].num_of_lane;\n temp_section_m.lane.clear();\n //points of section\n for ( int n = 0; n < msg->route.line[ j ].num_of_lane; n++ )\n {\n //temp_point_m = msg->route.line[ j ].lane[ n ]\n temp_point_m.k = msg->route.line[ j ].lane[ n ].k;\n temp_point_m.mileage = msg->route.line[ j ].lane[ n 
].mileage;\n //储存切片类型\n temp_point_m.type = msg->route.line[ j ].lane[ n ].type;\n temp_point_m.width = msg->route.line[ j ].lane[ n ].width;\n temp_point_m.x = msg->route.line[ j ].lane[ n ].x;\n temp_point_m.y = msg->route.line[ j ].lane[ n ].y;\n temp_point_m.yaw = msg->route.line[ j ].lane[ n ].yaw;\n temp_point_m.sug_speed = msg->route.line[ j ].lane[ n ].sug_speed;\n\n temp_section_m.lane.push_back( temp_point_m );\n }\n route_.line.push_back( temp_section_m );\n }\n\n return 1;\n\n\n}\n\nint PlanningNode::get_obstacles_raw( const nad_lcm::sensor_obstacle_report* msg_obstacle_list )\n{\n// cout << \"11111111111111111111 msg_obstacle_list->num_of_obstacle = \" << msg_obstacle_list->num_of_obstacle << endl;\n if ( msg_obstacle_list->num_of_obstacle < 0 )\n return 0;\n\n sensor_obstacles_.num_of_obstacle_ = msg_obstacle_list->num_of_obstacle;\n sensor_obstacles_.obstacles_info_.clear();\n\n for ( auto obs_msg : msg_obstacle_list->obstacle )\n {\n ObstacleInfo obs_temp;\n\n obs_temp.gps_time_ = obs_msg.gps_time;\n obs_temp.id_ = obs_msg.id;\n obs_temp.type_ = obs_msg.type;\n obs_temp.x_ = obs_msg.x;\n obs_temp.y_ = obs_msg.y;\n obs_temp.width_ = obs_msg.width;\n obs_temp.height_ = obs_msg.height;\n obs_temp.yaw_ = obs_msg.yaw;\n obs_temp.speed_ = obs_msg.speed;\n// cout << \"<<<<<< x = \" << obs_temp.x_ << \", y = \" << obs_temp.y_ << endl;\n\n\n sensor_obstacles_.obstacles_info_.push_back( obs_temp );\n }\n\n if ( OBSTACLES_LOG_SWITCH )\n {\n ofstream outfile(\"obstacles_raw.log\", std::ios::app);\n outfile.precision(8);\n\n outfile << \" num_of_obstacle : \" << sensor_obstacles_.num_of_obstacle_ << endl;\n\n for ( int i = 0; i < sensor_obstacles_.num_of_obstacle_; i++ )\n {\n outfile << \" i \" << i\n << \" id \" << sensor_obstacles_.obstacles_info_[i].id_\n << \" type \" << sensor_obstacles_.obstacles_info_[i].type_\n << \" x \" << sensor_obstacles_.obstacles_info_[i].x_\n << \" y \" << sensor_obstacles_.obstacles_info_[i].y_\n << \" width \" << 
sensor_obstacles_.obstacles_info_[i].width_\n << \" height \" << sensor_obstacles_.obstacles_info_[i].height_\n << \" yaw \" << sensor_obstacles_.obstacles_info_[i].yaw_\n << \" speed \" << sensor_obstacles_.obstacles_info_[i].speed_\n << endl;\n }\n\n outfile << endl << endl;\n outfile.close();\n }\n\n\n return 1;\n}\n\nint point2msg( const navi_point& src_point, obu_lcm::nav_points* msg_p )\n{\n msg_p->s = src_point.s;\n msg_p->p_x = src_point.position_x;\n msg_p->p_y = src_point.position_y;\n msg_p->p_v = src_point.speed_desired_Uxs;\n msg_p->p_a = src_point.acceleration_desired_Axs;\n msg_p->p_h = src_point.heading;\n msg_p->p_k = -src_point.k_s;\n msg_p->p_g = src_point.gears;\n\n return 1;\n}\n\n\n\n"
},
{
"alpha_fraction": 0.6869773268699646,
"alphanum_fraction": 0.6869773268699646,
"avg_line_length": 23.617647171020508,
"blob_id": "f13035e4a762f366acb20bba7cf5b3fe55923de5",
"content_id": "a9288c2cecaa004038fe3099b36ff6dcabe6db14",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 837,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 34,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerPairs.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include \"../utils/type.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerPair.h\"\n\nclass LaneMarkerPairs\n{\nprivate:\n FlexArray<ptrLaneMarkerPair>\t*_faLaneMarkerPairs;\n\npublic:\n LaneMarkerPairs(void);\n ~LaneMarkerPairs(void);\n LaneMarkerPair *getLaneMarkerPair(int idx);\n void deleteLaneMarkerPair(void);\n void deleteLaneMarkerPair(int iIdx);\n int getLaneMarkerPairNumber(void);\n void addLaneMarkerPair(LaneMarkerPair *pLaneMarkerPair);\n\n inline void remove_delete(int iIdx)\n {\n _faLaneMarkerPairs->remove_delete(iIdx);\n }\n inline void clear_reset(void)\n {\n _faLaneMarkerPairs->clear();\n _faLaneMarkerPairs->reset();\n }\n inline void reset(void)\n {\n _faLaneMarkerPairs->reset();\n }\n void set(int iIdx, LaneMarkerPair *pLaneMarkerPair);\n};\n"
},
{
"alpha_fraction": 0.5742626190185547,
"alphanum_fraction": 0.6152440905570984,
"avg_line_length": 23.87013053894043,
"blob_id": "f6855ca217a0b86e732a1b85d96e8148c5c75cf8",
"content_id": "65e550d32df763e02f7fa9d43b96134046611b86",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4989,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 154,
"path": "/athena/examples/LCM/Singlecar/control/common/chassis_detail.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n/**\n * @file control_logic.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef COMMON_CHASSIS_DETAIL_H_\n#define COMMON_CHASSIS_DETAIL_H_\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n/**\n * @class ChassisDetail\n * @brief 车辆底盘信息.\n */\nclass ChassisDetail\n{\npublic:\n ChassisDetail() = default;\n ~ChassisDetail() = default;\n ///底盘错误 0=正常 1=有错误\n int32_t chassis_error_;\n ///左前轮转速 rpm\n\tdouble wheel_speed_fl_;\n\t///右前轮转速 rpm\n\tdouble wheel_speed_fr_;\n\t///左后轮转速 rpm\n\tdouble wheel_speed_bl_;\n\t///右后轮转速 rpm\n\tdouble wheel_speed_br_;\n\t///车速\n\tdouble car_speed_;\n\t///发动机转速 rpm\n\tdouble eng_rpm_;\n\t///加速踏板位置 %\n\tdouble acc_pedal_pos_;\n\t///节气门反馈 %\n\tdouble throttle_pos_feedback_;\n\t///TCU无降扭请求时的发动机扭矩 NM\n\tdouble eng_torq_without_tcured_;\n\t///蓄电池电压 V\n\tdouble batt_volt_v_;\n\t///驾驶员需求扭矩\n\tdouble driver_torque_;\n\t///发动机实际指示扭矩\n\tdouble eng_actual_ind_torque_;\n\t///发动机总摩擦扭矩\n\tdouble friction_torque_;\n\t///慢速扭矩请求值 Nm\n\tdouble torque_limitation_;\n\t///快速扭矩请求值 Nm\n\tdouble torque_reduction_;\n\t///涡轮转速 rpm\n\tdouble turbine_rpm_;\n\t///制动压力信号 Mpa\n\tdouble brake_pressure_;\n\t///发动机运行状态\n\tint32_t engine_running_status_;\n\t///档杆位置\n\tint32_t gear_level_;\n\t///自动挡行驶档位\n\tint32_t at_gear_;\n\t///制动踏板开关信号\n\tint32_t brake_status_;\n\t///EPB状态 0 禁能 1 使能\n\tint32_t epb_brake_status_;\n\t///发动机减速断油 0 否 1 是\n\tint32_t dfco_;\n\t///怠速状态 0 否 1 是\n\tint32_t idle_status_;\n\t///TCU降扭有效性 0 available 1 not available\n\tint32_t tcu_torque_red_availability_;\n\t///发动机扭矩错误 0 OK 1 Fail\n\tint32_t eng_torque_failure_;\n\t///发动机释放 0 undefine 1 engine locked 2 engine release 3 reserved\n\tint32_t ems_released_;\n\t///发动机启动 0 未启动 1 启动\n\tint32_t eng_started_;\n\t///降扭请求标志为 0 无请求 1 有\n\tint32_t torque_reduction_flag_;\n\t///慢速扭矩请求标志位\n\tint32_t torque_limitation_flag_;\n\t///输入离合器状态 0 未吸合 1 吸合\n\tint32_t 
gear_eng_agement_;\n\t///TCU状态 0 正常 1 非跛行模式故障 2 跛行模式故障 3 未定义\n\tint32_t tcu_state_;\n\t///EBD 电子制动力分配系统是否工作 0 不工作 1 工作\n\tint32_t ebd_work_;\n\t///ABS工作标志 0 不工作 1 工作\n\tint32_t abs_ebdlable_;\n\t///油门控制工作模式 E 可控状态 0 暂时不可控 1 可控 2 正处于被控制状态 3 永久失效\n\tint32_t acc_driving_mode_feedback_;\n ///转向人工干预 0=否 1=是\n\tint32_t steering_manual_intervention;\n\t///转向人工干预是否有效 0=无效 1=有效\n\tint32_t steering_manual_intervention_vaild;\n\t///转向角度反馈\n\tdouble steering_angle_feedback_;\n\t///转向角速度反馈\n\tdouble steering_angle_speed_feedback_;\n\t///转向工作模式 可控状态 0 暂时不可控 1 可控 2 正处于被控制状态 3 永久失效\n\tint32_t steering_driving_mode_feedback_;\n\t///刹车值反馈\n\tdouble brake_value_feedback_;\n\t///刹车执行时间反馈\n\tdouble brake_run_time_feedback_;\n\t///纵向控制工作模式反馈 可控状态 0 暂时不可控 1 可控 2 正处于被控制状态 3 永久失效\n\tint32_t brake_driving_mode_feedback_;\n\t///EPB控制工作模式反馈 可控状态 0 暂时不可控 1 可控 2 正处于被控制状态 3 永久失效\n\tint32_t epb_driving_mode_feedback_;\n\t///喇叭控制 0 禁声音 1 鸣笛\n int8_t speaker_status_;\n ///远光灯 0 关闭 1 开启\n int8_t high_beam_status_;\n ///近光灯 0 关闭 1 开启\n int8_t low_beam_status_;\n ///左转向灯 0 关闭 1 开启\n int8_t left_turn_status_;\n ///右转向灯 0 关闭 1 开启\n int8_t right_turn_status_;\n ///前雨刮器 0 关闭 1 开启\n\tint8_t front_wiper_status_;\n ///后雨刮器 0 关闭 1 开启\n int8_t rear_wiper_status_;\n ///位置灯 0 关闭 1 开启\n int8_t position_lamp_status_;\n ///前雾灯 0 关闭 1 开启\n int8_t front_fog_lamp_status_;\n ///后雾灯 0 关闭 1 开启\n int8_t rear_fog_lamp_status_;\n ///刹车灯 一般情况自动控制 0 关闭 1 开启\n int8_t brake_lamp_status_;\n ///警报灯 双闪 0 关闭 1 开启\n int8_t alarm_lamp_status_;\n /// 左前门控制 0 关闭 1 开启\n int8_t lf_door_status_;\n /// 右前门控制 0 关闭 1 开启\n int8_t rf_door_status_;\n /// 左后门控制 0 关闭 1 开启\n int8_t lr_door_status_;\n /// 右后门控制 0 关闭 1 开启\n int8_t rr_door_status_;\n};\n}//namespace control\n}//namespace athena\n\n#endif\n"
},
{
"alpha_fraction": 0.5981112122535706,
"alphanum_fraction": 0.6164742708206177,
"avg_line_length": 25.845069885253906,
"blob_id": "6acfe951c2045825b730e5c9836f2bcaa123835c",
"content_id": "d61ecc160c1fb1c9df0fb89ac8dfb4bc27bf8bea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 5384,
"license_type": "no_license",
"max_line_length": 220,
"num_lines": 142,
"path": "/athena/examples/LCM/Singlecar/control/README.md",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "# Athena软件开发套件Control \n## svn 操作说明\n### 下载\nsvn update\n### 上传\nsvn ci -m \"\"\n\n#### 项目介绍\nAthena软件套件控制层软件命名为control,与旧版本controller相比有以下区别:1 软件框架优化。control软件分为三层,应用层,逻辑层以及控制器,应用层主要功能包括openGL显示、消息收发,调试、日志;逻辑层主要是刹车、档位、油门、驾驶模式、EPB等的逻辑处理,控制器为横纵向控制算法。2 消息更改。接收规划层bcm消息更改,控制命令信息更改;3 注释符合doxygen规范;4 代码风格为Google style。\n\n#### 软件架构\n软件架构说明\n\n### 部分关键宏定义\ntypedef enum{\n LCM = 1, /**< LCM消息*/\n}MessageType;\n<<<<<<< HEAD\n\ntypedef enum{\n CS55 = 1, /**< LCM消息*/\n}VehicleType;\n\ntypedef enum{\n INVALID = 0, /**< 无效*/\n HUMAN_DRIVING_MODE = 1, /**< 人工驾驶*/\n AUTO_DRIVING_MODE = 3, /**< 自动驾驶*/\n}DrivingMode;\n\ntypedef enum{\n UNCONTROLLABLE = 0, /**< 不可控*/\n CONTROLLABLE = 1, /**< 可控但处于非控制状态*/\n CONTROLLED = 2, /**< 正在控制中*/\n}DrivingModeFeedBack;\n\ntypedef enum{\n POSITION_P = 0, /**< P档位*/\n POSITION_R = 1, /**< R档位*/\n POSITION_N = 2, /**< N档位*/\n POSITION_D = 3, /**< D档位*/\n}GearLevel;\n\ntypedef enum{\n POSITION_P_FEEDBACK = 0, /**< P档位反馈*/\n POSITION_R_FEEDBACK = 1, /**< R档位反馈*/\n POSITION_N_FEEDBACK = 2, /**< N档位反馈*/\n POSITION_D_FEEDBACK = 3, /**< D档位反馈*/\n POSITION_SHIFTING_FEEDBACK = 4, /**< 档位切换过程中*/\n POSITION_INVALID_FEEDBACK = 5, /**< 无效档位*/\n}GearLevelFeedBack;\n\n#### 更新日志\n1.新建分支 athena_control_alex,后面的更改在该分支下更改\n2.修改文件生成目录,将controller_lib动态链接库默认生成到config目录,对应修改control工程的库文件查找目录\n3.将坐标转换功能提前到路径判断之前,方便在规划无路径下发时也能显示车辆状态\n4.仿真测试OK,未实车测试\n\n\n1.经历了实车测试,仅测试横向控制\n2.修改配置,使在release编译时使用arm交叉编译\n3.规划仅在启动自动驾驶时重规划,会发送启动指令,但退出自动驾驶不会重规划,不会发送退出指令\n4.横向效果较差,需要参数优化\n\n=======\n\ntypedef enum{\n CS55 = 1, /**< LCM消息*/\n}VehicleType;\n\ntypedef enum{\n INVALID = 0, /**< 无效*/\n HUMAN_DRIVING_MODE = 1, /**< 人工驾驶*/\n AUTO_DRIVING_MODE = 3, /**< 自动驾驶*/\n}DrivingMode;\n\ntypedef enum{\n UNCONTROLLABLE = 0, /**< 不可控*/\n CONTROLLABLE = 1, /**< 可控但处于非控制状态*/\n CONTROLLED = 2, /**< 正在控制中*/\n}DrivingModeFeedBack;\n\ntypedef enum{\n POSITION_P = 0, /**< P档位*/\n POSITION_R = 1, /**< R档位*/\n POSITION_N = 2, /**< N档位*/\n POSITION_D = 3, /**< D档位*/\n}GearLevel;\n\ntypedef enum{\n 
POSITION_P_FEEDBACK = 0, /**< P档位反馈*/\n POSITION_R_FEEDBACK = 1, /**< R档位反馈*/\n POSITION_N_FEEDBACK = 2, /**< N档位反馈*/\n POSITION_D_FEEDBACK = 3, /**< D档位反馈*/\n POSITION_SHIFTING_FEEDBACK = 4, /**< 档位切换过程中*/\n POSITION_INVALID_FEEDBACK = 5, /**< 无效档位*/\n}GearLevelFeedBack;\n\n#### 更新日志\n1.新建分支 athena_control_alex,后面的更改在该分支下更改\n2.修改文件生成目录,将controller_lib动态链接库默认生成到config目录,对应修改control工程的库文件查找目录\n3.将坐标转换功能提前到路径判断之前,方便在规划无路径下发时也能显示车辆状态\n4.仿真测试OK,未实车测试\n\n\n1.经历了实车测试,仅测试横向控制\n2.修改配置,使在release编译时使用arm交叉编译\n3.规划仅在启动自动驾驶时重规划,会发送启动指令,但退出自动驾驶不会重规划,不会发送退出指令\n4.横向效果较差,需要参数优化\n#### 更新日志 mwb\n\n1、在controller_agent.cpp文件void ControllerAgent::PathConvert(path& path_msg,const Trajectory *trajectory_msg)函数中添加\n\ttar_speed赋值语句temp_point_.tar_speed_ = trajectory_msg ->points_[i].p_v_\n\n2、在controller_agent.cpp文件 void ControllerAgent::GetControllerInfo(DebugOutput &debug_output)函数中添加 \n\ttemp_point_.tar_speed_ = it->tar_speed_;\n\n3、在control_logic.cpp文件void ControlLogic::GetControlLogicDebugOutput(ControlLogicDebugOutput &control_logic_debug_output)函数中\n修改\n\tcontrol_logic_debug_output_ = control_logic_debug_output;\n为\n\tcontrol_logic_debug_output = control_logic_debug_output_;\n\n4、修改\n typedef enum{\n\t UNCONTROLLABLE = 0, /**< 不可控*/\n\t CONTROLLABLE = 1, /**< 可控但处于非控制状态*/\n\t CONTROLLED = 2, /**< 正在控制中*/\n\t \n }DrivingModeFeedBack;\n为\n typedef enum{\n\t CONTROLLABLE = 1, /**< 可控但处于非控制状态*/\n\t CONTROLLED = 2, /**< 正在控制中*/\n\t UNCONTROLLABLE = 3, /**< 不可控*/\n}DrivingModeFeedBack;\n\n5、修改打印不对应\n\n\n\n\n>>>>>>> origin/athena_control_mwb\n"
},
{
"alpha_fraction": 0.45560747385025024,
"alphanum_fraction": 0.47663551568984985,
"avg_line_length": 37.818180084228516,
"blob_id": "64cbe53ff08b3537625047620bf3e07c6e7bc266",
"content_id": "48d69567cc3b81cfec21ad4762d56636dd234dee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 428,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 11,
"path": "/athena/core/arm/Planning/include/vehicle_dynamic/nearest_point_on_spline.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include \"navi_point.h\"\n#include \"spline.h\"\n\ndouble getClosestPointOnSpline(tk::spline& sp_x, tk::spline& sp_y,\n navi_point testPoint,\n double s1, double s2, double s3,\n double lower_limit, double upper_limit,\n double resolution = 1000,\n int maxIterations = 20);\n\n"
},
{
"alpha_fraction": 0.6965649127960205,
"alphanum_fraction": 0.7550890445709229,
"avg_line_length": 29.80392074584961,
"blob_id": "c5182e14779073f1c79b7a898bcaa53127802513",
"content_id": "bbf215ad020b340f8b90d226db92002367e7ad77",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2736,
"license_type": "no_license",
"max_line_length": 173,
"num_lines": 51,
"path": "/README.md",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#### ATHENA_20181204\n#### 版本更新说明\n1. obu更新:a.解决初始切片下发两次问题;b.重新规划失败之后obu段错误。\n2. planning更新:a.不同速度,规划长度的参数不同,并写入配置文件;b.更改ACC算法。\n\n#### ATHENA_20181123\n#### 版本更新说明\n1. control更新:解决prescan停车刹不住的情况。\n2. /athena/examples/LCM/Singlecar/下加入重卡仿真环境launch_truck(龙灵山地图和key.route)。\n3. /athena/examples/LCM/Singlecar/launch_truck/bin/下加入sim_system,方便重卡的仿真测试。\n4. planning更新:重新规划目的地后,planning给control下发退出自动驾驶的bug修复。\n\n#### ATHENA_20181121\n#### 版本更新说明\n1. control更新(解决偶尔出现停车刹不住的情况)。\n2. /athena/examples/LCM/APPS/下加入vui的apk安装包。\n3. /athena/docs/README.md里新增软件框架图和推荐安装的第三方库的版本。\n\n#### ATHENA_20181119\n#### 版本更新说明\n1. 地图nad.osm更新。\n2. control更新(使用之前适用于unity仿真的版本)。\n3. /athena/examples/titan3/launch/路径下增加arm版sim_vui。\n4. 更新planning(a.从惯导读取车速信息而不是从can读取,并写作配置文件; b.最大加速度改为0.5m/s2; c.设置最大规划长度,并写作配置文件; d.配置文件增加对障碍物的log开关)。\n\n#### ATHENA_20181118\n#### 版本更新说明\n1. /athena/core/arm/路径下增加各模块的arm库。\n2. /athena/examples/titan3/launch/路径下增加适配titan3的athena例子。\n3. 增加/athena/install_arm.sh脚本,用于titan3或ARM核下的动态库安装;并将之前x86动态库安装脚本名称更改为install_x86.sh。\n4. 更新控制模块的库和例子。\n5. 加入/athena/docs/README.md说明文档。\n\n#### ATHENA_20181108\n#### 版本更新说明\n1. 更新头文件,路径: \"/athena/core/x86/模块名称/include/\"。\n2. 更新库文件,路径: \"/athena/core/x86/模块名称/lib/\"。\n3. 更新完头文件和库文件后,运行脚本 /athena/install.sh,将各模块动态库文件拷贝到系统目录下\"/usr/local/lib/\"。\n4. 更新examples(目前为基于LCM的单车版本),在\"/athena/examples/LCM/Singlecar/模块名称/\"目录下建立各自模块的工程,编写实例调用动态库(注:链接的动态库文件为\"/usr/local/lib/***.so\",查找的头文件路径为\"../../../../core/x86/模块名称/include/\")。\n5. 编译生成可执行文件,和配置文件一起拷贝到\"/athena/examples/LCM/Singlecar/launch/\"目录下相应的位置。\n\n\n#### 版本负责人\n章品\n\n#### 模块负责人\n\n1. 网络模块:王翠\n2. 规划模块:枚元元\n3. 控制模块:王知权\n4. 感知模块:\n\n"
},
{
"alpha_fraction": 0.6009979844093323,
"alphanum_fraction": 0.6306016445159912,
"avg_line_length": 44.01295852661133,
"blob_id": "ac5e48aa3e68f140661cffb305e9190d88acf194",
"content_id": "cb56a27699e03068d1ea1b42131ea3093bd5ce3c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 23887,
"license_type": "no_license",
"max_line_length": 160,
"num_lines": 463,
"path": "/athena/examples/LCM/Singlecar/planning/config.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"config.h\"\n\nusing namespace std;\n\nusing namespace std;\n\nstd::string OBU_URL =\"udpm://239.255.76.63:7603?ttl=3\";\n\n///<档位参数\nint AT_STATUS_P = 0; ///<档位信息,P档\nint AT_STATUS_R = 1; ///<档位信息,R档\nint AT_STATUS_N = 2; ///<档位信息,N档\nint AT_STATUS_D = 3; ///<档位信息,D档\nint AT_STATUS_M = 4; ///<档位信息,M档\n\n///<车型相关参数\ndouble WHEEL_BASE = 2.5; ///<车轮轴长,单位:米\ndouble CAR_LENGTH = 4.5; ///<车身长,单位:米\ndouble CAR_WIDTH = 1.5; ///<车身宽,单位:米\ndouble CAR_HIGH = 1.515; ///<车身高,单位:米\ndouble CAR_WEIGHT = 1245; ///<车身质量,单位kg\ndouble CAR_MIN_R = 6.0; ///<车最小转弯半径,单位:米\ndouble MAX_STEERING_ANGLE = +548; \t ///<最大方向盘转角\ndouble MIN_STEERING_ANGLE = -548; ///<最小方向盘转角\ndouble STEERING_RATIO = 15; ///<方向盘和车轮转角的比例关系\n\n///<泊车相关参数\ndouble SAFE_DISTANCE_PARK = 0.8; ///<停车的安全保护距离(前后)\ndouble SAFE_WIDTH_PARK = 0.4; ///<停车的安全保护宽度(两侧)\ndouble PARK_LANE_WIDTH = 8.0; ///<泊车时泊车通道的道路宽度\ndouble H_MIN = 3.0; ///<泊车时纵向距离H的最小值\ndouble H_MAX = 7.0; ///<泊车时纵向距离H的最大值\ndouble H_STEP = 1.0; ///<泊车时纵向距离H的取值步长\ndouble S_STEP = 1.0; ///<泊车时横向距离S的取值步长\ndouble DELTA_S_MAX = 5.0; ///<泊车时横向距离S向前搜索的范围\ndouble EXTENDED_LINE_OF_PARALLEL = 5.0; ///<平行泊车时向前延展的距离\ndouble EXTENDED_LINE_OF_VERTICAL = 5.0; ///<垂直泊车时向前延展的距离\ndouble PARK_SPEED = 5.0; ///<泊车时的速度,单位:km/h\ndouble THRESHOLD_START_PARK = 20.0; ///<车与车库中心的距离小于该阈值时,可以触发泊车\ndouble THRESHOLD_CAR_STATIC_SPEED = 0.1; ///<本车车速小于该阈值可以认为车静止,单位:m/s\nint NUM_EXTEND_TRAJECTORY = 600; ///<给控制发轨迹时,需要延长一些(点的个数)\n\n///<地图匹配参数\nint SOURCE_OF_CAR_SPEED = 0;\t\t\t ///0:从惯导获得本车当前速度;1:从can信号获得本车当前速度\nint PRIOR_MATCH_LANE = 1;\t\t\t ///<优先匹配车道,1=低速道,0=高速道\ndouble THRESHOLD_MATCH_CENTERS = 0.5;\t\t ///<匹配多车道中心线的阈值(米)\ndouble GLOBAL_SEARCH_MATCH_CENTERS = 10.0;\t ///<匹配车道中心线粗搜索时的限差范围(米)\nint NUM_BEFORE_MATCH_CENTERS = 5;\t\t ///<匹配车道中心线时向前搜索的点数\ndouble THRESHOLD_MATCH_BIAS = 10.0;\t\t ///<点匹配到中心线的距离过大,认为匹配失败(米)\ndouble COEF1_MOTION_WITHOUT_VP = 15.0;\t\t ///<没有virtual_path时(第一次规划目的地),计算MOTION_PLANNING_LENGTH的系数1(常数项)\ndouble COEF2_MOTION_WITHOUT_VP = 1.0;\t\t 
///<没有virtual_path时(第一次规划目的地),计算MOTION_PLANNING_LENGTH的系数2(横向偏差系数)\ndouble COEF1_AHEAD_WITHOUT_VP = 0.0;\t\t ///<没有virtual_path时(第一次规划目的地),计算AHEAD_OF_MOTION_PLANNING的系数1(常数项)\ndouble COEF2_AHEAD_WITHOUT_VP = 0.0;\t\t ///<没有virtual_path时(第一次规划目的地),计算AHEAD_OF_MOTION_PLANNING的系数2(速度系数)\ndouble COEF1_MOTION_WITH_VP = 10.0;\t\t ///<有virtual_path时,计算MOTION_PLANNING_LENGTH的系数1(常数项)\ndouble COEF2_MOTION_WITH_VP = 3.0;\t\t ///<有virtual_path时,计算MOTION_PLANNING_LENGTH的系数2(速度系数)\ndouble THRESHOLD_HIGH_SPEED = 10.0;\t\t ///<速度高时用另一套系数\ndouble COEF1_MOTION_HIGH_SPEED = 10.0;\t\t ///<高速时时,计算MOTION_PLANNING_LENGTH的系数1(常数项)\ndouble COEF2_MOTION_HIGH_SPEED = 3.0;\t\t ///<高速时时,计算MOTION_PLANNING_LENGTH的系数2(速度系数)\ndouble COEF1_AHEAD_WITH_VP = 0.5;\t\t ///<有virtual_path时,计算AHEAD_OF_MOTION_PLANNING的系数1(常数项)\ndouble COEF2_AHEAD_WITH_VP = 0.2;\t\t ///<有virtual_path时,计算AHEAD_OF_MOTION_PLANNING的系数2(速度系数)\ndouble MIN_MOTION_LENGTH = 5.0;\t\t\t ///<一次规划的最小长度(米)\ndouble MAX_MOTION_LENGTH = 80.0;\t\t\t ///<一次规划的最大长度(米)\ndouble MAX_MOTION_DELTA_HEADING = 30.0;\t\t ///<一次规划的最大角度差(度)\ndouble INTERVAL_MAP_SECTION = 1.5;\t\t ///<下发的地图切片点的间隔(米)\ndouble SPLINE_EVERY = 0.1;\t\t\t ///<规划轨迹点间隔(米)\ndouble MAP_SPLINE_EVERY = 1.5;\t\t\t ///<地图中心线点间隔(米)\ndouble MATCH_STOP_POINT_ERROR = 2.0;\t\t ///<匹配停车点时点距离线的最小限差(米)\nint TRAFFIC_LIGHTS_CHECKS_LENGTH = 40; ///<路口红绿灯停车点的检查距离(个)\nint BEFORE_LIGHTS = 1; ///<路口提前停车距离 (个)\n\n///<障碍物\nint NUMBER_BACKWARD = 70;\t\t\t ///<障碍物根据边界过滤时向后搜索的点数\nint NUMBER_FORWARD = 70;\t\t\t ///<障碍物根据边界过滤时向前搜索的点数\ndouble PEDESTRIAN_WIDEN_DIS = 2.5;\t\t ///<行人加宽距离范围(米)\ndouble PEDESTRIAN_WIDEN_ANG = 0.0;\t\t ///<行人加宽角度范围(度)\ndouble PEDESTRIAN_WIDEN_WIDTH = 0.0;\t\t ///<行人加宽的宽度(米)\ndouble CAR_LENGTHEN_LENGTH = 0.0;\t\t ///<障碍车加长的长度(米)\nint OBSTACLE_COST_VALUE = 30000;\t\t ///<障碍物的代价值\nint B_READ_OBSTACLE_SPEED = 1;\t\t ///<0:不读取障碍物速度;1:读取障碍物速度\n\n///<碰撞检测\nint COL_CHECK_INTERVAL = 15;\t\t\t ///<碰撞检测时,虚拟车道的搜索间隔(个)\ndouble THRESHOLD_DELTA_LENGTH = 2.0;\t\t 
///<虚拟车道搜索时,前后点间距超过一定的阈值,就调整搜索间隔(米)\ndouble THRESHOLD_STATIC_SPEED = 0.5;\t\t ///<速度小于THRESHOLD_STATIC_SPEED m/s,认为是静态障碍物,需要停车或避障(m/s)\ndouble RATIO_SPEED_CAR_FOLLOWING = 0.7; ///<障碍物速度小于本车速度该比例时,选择超车\ndouble CAR_FOLLOWING_SPEED_DIFF = 1.5;\t\t ///<调整车速略小于跟随的障碍物速度(m/s)\ndouble SAFE_WIDTH = 0.5; \t\t\t ///<安全保护的距离(米)\ndouble SAFE_LENGTH = 0.3; \t\t\t ///<安全保护的距离(米)\ndouble FREE_LENGTH_DIFF = 15.0;\t\t\t ///<如果车道都有碰撞,取碰撞距离更大的(米)\nint COUNT_COLLISION = 10;\t\t\t ///<持续超过COUNT_COLLISION帧有碰撞才进行重规划,否则只是减速\n//# SLOW_DOWN_STEP 0.1 #减速的步长(程序中会重新计算)\ndouble COEF_SLOW_DOWN_STEP = 0.1;\t\t ///<SLOW_DOWN_STEP 0.5 * motion_tar_speed / COUNT_COLLISION;\ndouble COEF_COL_CHECK_LENGTH = 1.5;\t ///<决定碰撞检测长度:COLLISION_CHECK_LENGTH=COEF_COL_CHECK_LENGTH * ( MOTION_PLANNING_LENGTH + AHEAD_OF_MOTION_PLANNING );\nint COUNT_SLOW = 10;\t\t\t\t ///<遇见障碍物减速后保持低速一段时间\n\n///#轨迹规划相关参数\ndouble COEF_UPDATE_TRAJECTORY = 0.4;\t\t ///<更新轨迹的比例参数,决定走过多少里程更新一次轨迹\nint AHEAD_OF_MOTION_PLANNING = 12;\t\t ///<单位(米)\nint MOTION_PLANNING_LENGTH = 42;\t\t ///<单位(米)\nint AFTER_MOTION_PLANNING_NUM = 40;\t\t ///<单位(个)\n\n///#轨迹生成【横向】\ndouble LAT_OFFSET = 4.0; \t\t\t ///<用于生成轨迹范围(左右各LAT_OFFSET米)\ndouble COEF_LIMIT_LAT_STEP = 0.1;\t\t ///<拨杆换道限制、障碍物减速过程或者经过路口时,横向偏移范围收缩的比例系数\nint NUM_TRAJ_CLUSTER = 16;\t\t\t ///<生成轨迹簇的个数(2×NUM_TRAJ_CLUSTER+1)\nint NUM_BACKWARD_TRAJ_CHECK = 5;\t\t ///<轨迹边界检测时向后搜索的点数(个)\nint NUM_FORWARD_TRAJ_CHECK = 5;\t\t ///<轨迹边界检测时向前搜索的点数(个)\nint OUT_SIDE_INTERVAL = 20;\t\t\t ///<判断轨迹是否出界时的搜索步长(个)\nint OUT_LINE_COST_VALUE = 200;\t\t ///<出车道线的代价值\nint OUT_EDGE_COST_VALUE = 60000;\t\t ///<出道路边缘的代价值\ndouble COEF_END_POS = 20.0;\t\t\t ///<综合计算代价值时,终点代价值的系数\ndouble COEF_COL_CHECK = 1.0;\t\t\t ///<综合计算代价值时,碰撞代价值的系数\ndouble COEF_LEFT = 1.0;\t\t\t\t ///<综合计算代价值时,左边线代价值的系数\ndouble COEF_RIGHT = 1.0;\t\t\t ///<综合计算代价值时,右边线代价值的系数\ndouble COEF_KS = 10000.0;\t\t\t ///<综合计算代价值时,曲率代价值的系数\ndouble THRESHOLD_KS = 0.285;\t\t\t ///<当曲率大于THRESHOLD_KS时,考虑曲率代价值\nint THRESHOLD_COST_VALUE = 20000;\t\t 
///<当最优路径的代价值还大于THRESHOLD_COST_VALUE时,需要停车\n\n///#各种测试开关\nint MAP_LOG_SWITCH = 0;\nint TRAJECTORY_LOG_SWITCH = 0;\nint TRAFFIC_LIGHTS_LOG_SWITCH = 0;\nint CHANGE_LANE_LOG_SWITCH = 0;\nint OBSTACLES_LOG_SWITCH = 0;\nint LONGITUDINAL_CONTROL_LOG_SWITCH = 0;\nint MAP_MATCHING_LOG_SWITCH = 0;\nint SELECT_VALUE_LOG_SWITCH = 0;\nint VIRTUAL_PATH_LOG_SWITCH = 0; ///<实时匹配虚拟轨迹的状态\n\nint TRAJECTORY_VIEW_SWITCH = 0;\nint SPEED_PLAN_VIEW_SWITCH = 0;\nint CHANGE_LANE_VIEW_SWITCH = 0;\nint LATERAL_CONTROL_VIEW_SWITCH = 0;\nint LONGITUDINAL_CONTROL_VIEW_SWITCH = 0;\nint MAP_MATCHING_VIEW_SWITCH = 0;\nint COLLISION_CHECK_VIEW_SWITCH = 0;\nint PLANNING_VALUE_VIEW_SWITCH = 0;\nint NEXTWORK_CHANGELANE_VIEW_SWITCH = 0;\nint SELECT_VALUE_VIEW_SWITCH = 0;\n\n///#轨迹生成【纵向】\nint STOP_LENGTH_TO_OBS = 100;\t ///<停车位置距离障碍物的距离\ndouble COEF_KS_SPEED = 0.005;\t\t\t ///<速度、曲率转换\ndouble MIN_SPEED = 1.0;\t\t\t\t ///<最小速度(m/s)\ndouble MAX_ACCELERATION = 1.0;\t\t\t ///<最大加速度\ndouble MAX_DECELERATION = 0.5;\t\t\t ///<最大减速度\n\n///#origin point\ndouble ORIGIN_LAT = 31.281675599;\t ///<坐标原点纬度 #shanghai\ndouble ORIGIN_LON = 121.163174090; ///<坐标原点经度\n\n\nbool read_motion_plan_config_value_from_file()\n{\n\tconst char ConfigFile[] = \"planning_value.cfg\";\n\tConfig configSettings(ConfigFile);\n\n\t//开始读取配置文件\n\tOBU_URL = \"udpm://239.255.76.22:7622?ttl=3\";\n\tOBU_URL = configSettings.Read(\"OBU_URL\", OBU_URL);\n\n // 档位参数\n AT_STATUS_P = configSettings.Read(\"AT_STATUS_P\",0);\n AT_STATUS_R = configSettings.Read(\"AT_STATUS_R\",1);\n AT_STATUS_N = configSettings.Read(\"AT_STATUS_N\",2);\n AT_STATUS_D = configSettings.Read(\"AT_STATUS_D\",3);\n AT_STATUS_M = configSettings.Read(\"AT_STATUS_M\",4);\n\n //车型相关参数\n WHEEL_BASE = configSettings.Read(\"WHEEL_BASE\",2.5);\n CAR_LENGTH = configSettings.Read(\"CAR_LENGTH\",4.5);\n CAR_WIDTH = configSettings.Read(\"CAR_WIDTH\",1.5);\n CAR_HIGH = configSettings.Read(\"CAR_HIGH\",1.515);\n CAR_WEIGHT = configSettings.Read(\"CAR_WEIGHT\",1245); //kg\n CAR_MIN_R = 
configSettings.Read(\"CAR_MIN_R\",6.0);\n MAX_STEERING_ANGLE = configSettings.Read(\"MAX_STEERING_ANGLE\",+548);\n MIN_STEERING_ANGLE = configSettings.Read(\"MIN_STEERING_ANGLE\",-548); //kg\n STEERING_RATIO = configSettings.Read(\"STEERING_RATIO\",15);\n\n //泊车相关参数\n SAFE_DISTANCE_PARK = configSettings.Read(\"SAFE_DISTANCE_PARK\",0.8);\n SAFE_WIDTH_PARK = configSettings.Read(\"SAFE_WIDTH_PARK\",0.4);\n PARK_LANE_WIDTH = configSettings.Read(\"PARK_LANE_WIDTH\",8.0);\n H_MIN = configSettings.Read(\"H_MIN\",3.0);\n H_MAX = configSettings.Read(\"H_MAX\",7.0);\n H_STEP = configSettings.Read(\"H_STEP\",1.0);\n S_STEP = configSettings.Read(\"S_STEP\",1.0);\n DELTA_S_MAX = configSettings.Read(\"DELTA_S_MAX\",5.0);\n EXTENDED_LINE_OF_PARALLEL = configSettings.Read(\"EXTENDED_LINE_OF_PARALLEL\",5.0);\n EXTENDED_LINE_OF_VERTICAL = configSettings.Read(\"EXTENDED_LINE_OF_VERTICAL\",5.0); //kg\n PARK_SPEED = configSettings.Read(\"PARK_SPEED\",5.0);\n THRESHOLD_START_PARK = configSettings.Read(\"THRESHOLD_START_PARK\",20.0);\n THRESHOLD_CAR_STATIC_SPEED = configSettings.Read(\"THRESHOLD_CAR_STATIC_SPEED\",0.1); //kg\n NUM_EXTEND_TRAJECTORY = configSettings.Read(\"NUM_EXTEND_TRAJECTORY\",600);\n\n //地图匹配参数\n SOURCE_OF_CAR_SPEED = configSettings.Read(\"SOURCE_OF_CAR_SPEED\",0);\n PRIOR_MATCH_LANE = configSettings.Read(\"PRIOR_MATCH_LANE\",1);\n THRESHOLD_MATCH_CENTERS = configSettings.Read(\"THRESHOLD_MATCH_CENTERS\",0.5);\n GLOBAL_SEARCH_MATCH_CENTERS = configSettings.Read(\"GLOBAL_SEARCH_MATCH_CENTERS\",10.0);\n NUM_BEFORE_MATCH_CENTERS = configSettings.Read(\"NUM_BEFORE_MATCH_CENTERS\",5);\n THRESHOLD_MATCH_BIAS = configSettings.Read(\"THRESHOLD_MATCH_BIAS\",10.0);\n COEF1_MOTION_WITHOUT_VP = configSettings.Read(\"COEF1_MOTION_WITHOUT_VP\",15.0);\n COEF2_MOTION_WITHOUT_VP = configSettings.Read(\"COEF2_MOTION_WITHOUT_VP\",1.0);\n COEF1_AHEAD_WITHOUT_VP = configSettings.Read(\"COEF1_AHEAD_WITHOUT_VP\",0.0);\n COEF2_AHEAD_WITHOUT_VP = 
configSettings.Read(\"COEF2_AHEAD_WITHOUT_VP\",0.0);\n COEF1_MOTION_WITH_VP = configSettings.Read(\"COEF1_MOTION_WITH_VP\",10.0);\n COEF2_MOTION_WITH_VP = configSettings.Read(\"COEF2_MOTION_WITH_VP\",3.0);\n THRESHOLD_HIGH_SPEED = configSettings.Read(\"THRESHOLD_HIGH_SPEED\",0.0);\n COEF1_MOTION_HIGH_SPEED = configSettings.Read(\"COEF1_MOTION_HIGH_SPEED\",0.0);\n COEF2_MOTION_HIGH_SPEED = configSettings.Read(\"COEF2_MOTION_HIGH_SPEED\",0.0);\n COEF1_AHEAD_WITH_VP = configSettings.Read(\"COEF1_AHEAD_WITH_VP\",0.5);\n COEF2_AHEAD_WITH_VP = configSettings.Read(\"COEF2_AHEAD_WITH_VP\",0.2);\n MIN_MOTION_LENGTH = configSettings.Read(\"MIN_MOTION_LENGTH\",5.0);\n MAX_MOTION_LENGTH = configSettings.Read(\"MAX_MOTION_LENGTH\",80.0);\n MAX_MOTION_DELTA_HEADING = configSettings.Read(\"MAX_MOTION_DELTA_HEADING\",30.0);\n INTERVAL_MAP_SECTION = configSettings.Read(\"INTERVAL_MAP_SECTION\",1.5);\n SPLINE_EVERY = configSettings.Read(\"SPLINE_EVERY\",0.1);\n MAP_SPLINE_EVERY = configSettings.Read(\"MAP_SPLINE_EVERY\",1.5);\n MATCH_STOP_POINT_ERROR = configSettings.Read(\"MATCH_STOP_POINT_ERROR\",2.0);\n TRAFFIC_LIGHTS_CHECKS_LENGTH = configSettings.Read(\"TRAFFIC_LIGHTS_CHECKS_LENGTH\",40);\n BEFORE_LIGHTS = configSettings.Read(\"BEFORE_LIGHTS\",1);\n\n //障碍物\n NUMBER_BACKWARD = configSettings.Read(\"NUMBER_BACKWARD\",70);\n NUMBER_FORWARD = configSettings.Read(\"NUMBER_FORWARD\",70);\n PEDESTRIAN_WIDEN_DIS = configSettings.Read(\"PEDESTRIAN_WIDEN_DIS\",2.5);\n PEDESTRIAN_WIDEN_ANG = configSettings.Read(\"PEDESTRIAN_WIDEN_ANG\",0.0);\n PEDESTRIAN_WIDEN_WIDTH = configSettings.Read(\"PEDESTRIAN_WIDEN_WIDTH\",0.0);\n CAR_LENGTHEN_LENGTH = configSettings.Read(\"CAR_LENGTHEN_LENGTH\",0.0);\n OBSTACLE_COST_VALUE = configSettings.Read(\"OBSTACLE_COST_VALUE\",30000);\n B_READ_OBSTACLE_SPEED = configSettings.Read(\"B_READ_OBSTACLE_SPEED\",1);\n\n //碰撞检测\n COL_CHECK_INTERVAL = configSettings.Read(\"COL_CHECK_INTERVAL\",15);\n THRESHOLD_DELTA_LENGTH = 
configSettings.Read(\"THRESHOLD_DELTA_LENGTH\",2.0);\n THRESHOLD_STATIC_SPEED = configSettings.Read(\"THRESHOLD_STATIC_SPEED\",0.5);\n RATIO_SPEED_CAR_FOLLOWING = configSettings.Read(\"RATIO_SPEED_CAR_FOLLOWING\",0.7);\n CAR_FOLLOWING_SPEED_DIFF = configSettings.Read(\"CAR_FOLLOWING_SPEED_DIFF\",1.5);\n SAFE_WIDTH = configSettings.Read(\"SAFE_WIDTH\",0.5);\n SAFE_LENGTH = configSettings.Read(\"SAFE_LENGTH\",0.3);\n FREE_LENGTH_DIFF = configSettings.Read(\"FREE_LENGTH_DIFF\",15.0);\n COUNT_COLLISION = configSettings.Read(\"COUNT_COLLISION\",10);\n //SLOW_DOWN_STEP = configSettings.Read(\"SLOW_DOWN_STEP\",0.1);\n COEF_SLOW_DOWN_STEP = configSettings.Read(\"COEF_SLOW_DOWN_STEP\",0.1);\n COEF_COL_CHECK_LENGTH = configSettings.Read(\"COEF_COL_CHECK_LENGTH\",1.5);\n COUNT_SLOW = configSettings.Read(\"COUNT_SLOW\",10);\n\n //轨迹规划相关参数\n COEF_UPDATE_TRAJECTORY = configSettings.Read(\"COEF_UPDATE_TRAJECTORY\",0.4);\n AHEAD_OF_MOTION_PLANNING = configSettings.Read(\"AHEAD_OF_MOTION_PLANNING\",12); //安全保护的距离\n MOTION_PLANNING_LENGTH = configSettings.Read(\"MOTION_PLANNING_LENGTH\",42); //安全保护的距离\n AFTER_MOTION_PLANNING_NUM = configSettings.Read(\"AFTER_MOTION_PLANNING_NUM\",40); //安全保护的距离\n\n //轨迹生成【横向】\n LAT_OFFSET = configSettings.Read(\"LAT_OFFSET\",4.0);\n COEF_LIMIT_LAT_STEP = configSettings.Read(\"COEF_LIMIT_LAT_STEP\",0.1);\n NUM_TRAJ_CLUSTER = configSettings.Read(\"NUM_TRAJ_CLUSTER\",16);\n NUM_BACKWARD_TRAJ_CHECK = configSettings.Read(\"NUM_BACKWARD_TRAJ_CHECK\",5);\n NUM_FORWARD_TRAJ_CHECK = configSettings.Read(\"NUM_FORWARD_TRAJ_CHECK\",5);\n OUT_SIDE_INTERVAL = configSettings.Read(\"OUT_SIDE_INTERVAL\",20);\n OUT_LINE_COST_VALUE = configSettings.Read(\"OUT_LINE_COST_VALUE\",200);\n OUT_EDGE_COST_VALUE = configSettings.Read(\"OUT_EDGE_COST_VALUE\",60000);\n COEF_END_POS = configSettings.Read(\"COEF_END_POS\",20.0);\n COEF_COL_CHECK = configSettings.Read(\"COEF_COL_CHECK\",1.0);\n COEF_LEFT = configSettings.Read(\"COEF_LEFT\",1.0);\n COEF_RIGHT = 
configSettings.Read(\"COEF_RIGHT\",1.0);\n COEF_KS = configSettings.Read(\"COEF_KS\",10000.0);\n THRESHOLD_KS = configSettings.Read(\"THRESHOLD_KS\",0.285);\n THRESHOLD_COST_VALUE = configSettings.Read(\"THRESHOLD_COST_VALUE\",20000);\n\n //各种测试开关\n MAP_LOG_SWITCH = configSettings.Read(\"MAP_LOG_SWITCH\",0);\n TRAJECTORY_LOG_SWITCH = configSettings.Read(\"TRAJECTORY_LOG_SWITCH\",0);\n TRAFFIC_LIGHTS_LOG_SWITCH = configSettings.Read(\"TRAFFIC_LIGHTS_LOG_SWITCH\",0);\n CHANGE_LANE_LOG_SWITCH = configSettings.Read(\"CHANGE_LANE_LOG_SWITCH\",0);\n OBSTACLES_LOG_SWITCH = configSettings.Read(\"OBSTACLES_LOG_SWITCH\",0);\n LONGITUDINAL_CONTROL_LOG_SWITCH = configSettings.Read(\"LONGITUDINAL_CONTROL_LOG_SWITCH\",0);\n MAP_MATCHING_LOG_SWITCH = configSettings.Read(\"MAP_MATCHING_LOG_SWITCH\",0);\n SELECT_VALUE_LOG_SWITCH = configSettings.Read(\"SELECT_VALUE_LOG_SWITCH\",0);\n VIRTUAL_PATH_LOG_SWITCH = configSettings.Read(\"VIRTUAL_PATH_LOG_SWITCH\",0);\n TRAJECTORY_VIEW_SWITCH = configSettings.Read(\"TRAJECTORY_VIEW_SWITCH\",0);\n SPEED_PLAN_VIEW_SWITCH = configSettings.Read(\"SPEED_PLAN_VIEW_SWITCH\",0);\n CHANGE_LANE_VIEW_SWITCH = configSettings.Read(\"CHANGE_LANE_VIEW_SWITCH\",0);\n LATERAL_CONTROL_VIEW_SWITCH = configSettings.Read(\"LATERAL_CONTROL_VIEW_SWITCH\",0);\n LONGITUDINAL_CONTROL_VIEW_SWITCH = configSettings.Read(\"LONGITUDINAL_CONTROL_VIEW_SWITCH\",0);\n MAP_MATCHING_VIEW_SWITCH = configSettings.Read(\"MAP_MATCHING_VIEW_SWITCH\",0);\n COLLISION_CHECK_VIEW_SWITCH = configSettings.Read(\"COLLISION_CHECK_VIEW_SWITCH\",0);\n PLANNING_VALUE_VIEW_SWITCH = configSettings.Read(\"PLANNING_VALUE_VIEW_SWITCH\",0);\n NEXTWORK_CHANGELANE_VIEW_SWITCH = configSettings.Read(\"NEXTWORK_CHANGELANE_VIEW_SWITCH\",0);\n SELECT_VALUE_VIEW_SWITCH = configSettings.Read(\"SELECT_VALUE_VIEW_SWITCH\",0);\n\n //轨迹生成【纵向】\n STOP_LENGTH_TO_OBS = configSettings.Read(\"STOP_LENGTH_TO_OBS\",100);\n COEF_KS_SPEED = configSettings.Read(\"COEF_KS_SPEED\",0.005);\n MIN_SPEED = 
configSettings.Read(\"MIN_SPEED\",1.0);\n MAX_ACCELERATION = configSettings.Read(\"MAX_ACCELERATION\",1.0);\n MAX_DECELERATION = configSettings.Read(\"MAX_DECELERATION\",0.5);\n\n //#origin point\n ORIGIN_LAT = configSettings.Read(\"ORIGIN_LAT\",31.281675599);\n ORIGIN_LON = configSettings.Read(\"ORIGIN_LON\",121.163174090);\n\n\n return true;\n}\n\n\n\nbool write_config_value_from_file()\n{\n return true;\n}\n\n\n\n/************************************************************* Config ************************************************************************/\nConfig::Config( string filename,\n string delimiter,\n string comment )\n : m_Delimiter(delimiter), m_Comment(comment)\n{\n std::ifstream in( filename.c_str() );\n\n if( !in ) throw File_not_found( filename );\n\n in >> (*this);\n}\n\n\nConfig::Config()\n : m_Delimiter( string(1,'=') ), m_Comment( string(1,'#') )\n{\n\n}\n\nbool Config::KeyExists( const string& key ) const\n{\n // Indicate whether key is found\n mapci p = m_Contents.find( key );\n return ( p != m_Contents.end() );\n}\n\n\n/* static */\nvoid Config::Trim( string& inout_s )\n{\n static const char whitespace[] = \" \\n\\t\\v\\r\\f\";\n inout_s.erase( 0, inout_s.find_first_not_of(whitespace) );\n inout_s.erase( inout_s.find_last_not_of(whitespace) + 1U );\n}\n\n\nstd::ostream& operator<<( std::ostream& os, const Config& cf )\n{\n // Save a Config to os\n for( Config::mapci p = cf.m_Contents.begin();\n p != cf.m_Contents.end();\n ++p )\n {\n os << p->first << \" \" << cf.m_Delimiter << \" \";\n os << p->second << std::endl;\n }\n return os;\n}\n\nvoid Config::Remove( const string& key )\n{\n m_Contents.erase( m_Contents.find( key ) );\n return;\n}\n\nstd::istream& operator>>( std::istream& is, Config& cf )\n{\n // Load a Config from is\n // Read in keys and values, keeping internal whitespace\n typedef string::size_type pos;\n const string& delim = cf.m_Delimiter; // separator\n const string& comm = cf.m_Comment; // comment\n const pos skip = 
delim.length(); // length of separator\n\n string nextline = \"\"; // might need to read ahead to see where value ends\n\n while( is || nextline.length() > 0 )\n {\n // Read an entire line at a time\n string line;\n if( nextline.length() > 0 )\n {\n line = nextline; // we read ahead; use it now\n nextline = \"\";\n }\n else\n {\n std::getline( is, line );\n }\n\n // Ignore comments\n line = line.substr( 0, line.find(comm) );\n\n // Parse the line if it contains a delimiter\n pos delimPos = line.find( delim );\n if( delimPos < string::npos )\n {\n // Extract the key\n string key = line.substr( 0, delimPos );\n line.replace( 0, delimPos+skip, \"\" );\n\n // See if value continues on the next line\n // Stop at blank line, next line with a key, end of stream,\n // or end of file sentry\n bool terminate = false;\n while( !terminate && is )\n {\n std::getline( is, nextline );\n terminate = true;\n\n string nlcopy = nextline;\n Config::Trim(nlcopy);\n if( nlcopy == \"\" ) continue;\n\n nextline = nextline.substr( 0, nextline.find(comm) );\n if( nextline.find(delim) != string::npos )\n continue;\n\n nlcopy = nextline;\n Config::Trim(nlcopy);\n if( nlcopy != \"\" ) line += \"\\n\";\n line += nextline;\n terminate = false;\n }\n\n // Store key and value\n Config::Trim(key);\n Config::Trim(line);\n cf.m_Contents[key] = line; // overwrites if key is repeated\n }\n }\n\n return is;\n}\nbool Config::FileExist(std::string filename)\n{\n bool exist= false;\n std::ifstream in( filename.c_str() );\n if( in )\n exist = true;\n return exist;\n}\n\nvoid Config::ReadFile( string filename,\n string delimiter,\n string comment )\n{\n m_Delimiter = delimiter;\n m_Comment = comment;\n std::ifstream in( filename.c_str() );\n\n if( !in )\n throw File_not_found( filename );\n\n in >> (*this);\n}\n\n"
},
{
"alpha_fraction": 0.5414178371429443,
"alphanum_fraction": 0.5670902729034424,
"avg_line_length": 30.001583099365234,
"blob_id": "f0abc9232a4dd4a4653184c70e76109992bc5d1f",
"content_id": "76e27de7eadc209267171633d2e2fddbbccf3cae",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 24991,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 632,
"path": "/athena/core/x86/Common/include/base/nad_enum.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_enum.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:公共枚举值\n-------------------------------------------------------*/\n#ifndef _NAD_ENUM_H\n#define _NAD_ENUM_H\n\n\n//枚举值数据类型为int\n\n\n//----------------------------------------------------------\n// 网元定义枚举值\n//----------------------------------------------------------\n\n//网元类型\n#define E_NE_CSU 0 //CSU\n#define E_NE_RSU 1 //RSU\n#define E_NE_OBU 2 //OBU\n#define E_NE_RSD 3 //OBU\n#define E_NE_RSD_CAMERA 10 //路测-摄影头\n#define E_NE_RSD_LIDAR 11 //路测-激光雷达\n#define E_NE_ETS_LIGHT 20 //电子红绿灯\n#define E_NE_ETS_LIMSPEED 21 //电子施工标志\n#define E_NE_ETS_BLOCK 22 //电子限速牌\nconst char *ne_str(const int val);\n\n//网元状态\n#define NE_STATUS_OFF 0 //不在位\n#define NE_STATUS_ON 1 //在位\nconst char *ne_status_str(const int val);\n\n//心跳超时时长,25000毫秒=25秒\n#define HTBT_TIMEOUT 250000000\n#define HTBT_TIMEOUT_OCT 50000 //5 sec\n//#define HTBT_TIMEOUT 5000000 //gdb时可以改成5000秒,避免调试时很容易心跳注销\n\n\n//----------------------------------------------------------\n// obu上的obu的状态\n//----------------------------------------------------------\n\n//obu管理自身的状态机\n#define E_OBU_SESSION_OBU_DISCONN_HAND 0 //无网络人工驾驶\n#define E_OBU_SESSION_OBU_CONNECT_HAND 1 //有网络人工驾驶\n#define E_OBU_SESSION_OBU_DISCONN_AUTO 2 //无网络自动驾驶\n#define E_OBU_SESSION_OBU_CONNECT_AUTO 3 //有网络自动驾驶\n\n//obu管理自身注册状态的状态机\n#define E_OBU_SESSION_OBU_LOGIN_CSU 0 //正在向csu要个rsu_name\n#define E_OBU_SESSION_OBU_LOGIN_RSU 1 //正在注册到rsu\n#define E_OBU_SESSION_OBU_LOGIN_OK 2 //注册成功\n\n//obu请求启动自动驾驶的状态机\n#define E_OBU_SESSION_OBU_START_AUTO_WAIT 0 //已发送or_start_auto_request消息,等待应答\n#define E_OBU_SESSION_OBU_START_AUTO_OK 1 //收到ro_start_auto_respond应答,启动成功\n\n/**\n * class obu_session_obu\n * {\n * int session_status; //主状态,枚举值为E_OBU_SESSION_OBU_XXX\n * int login_status; //登录状态,枚举值为E_OBU_SESSION_OBU_LOGIN_XXX\n * } obu;\n *\n * //每秒处理obu状态\n * if (obu.session_status == DISCONN_HAND || obu.session_status == DISCONN_AUTO)\n * {\n * 
根据obu.login_status发消息注册\n * }\n * else if (obu.htbt_count > HTBT_TIMEOUT) //心跳超时\n * {\n * obu.session_status = DISCONN_AUTO; //根据降级原则,首先降级为DISCONN_AUTO\n * obu.login_status = LOGIN_CSU; //开始不断的注册\n * }\n *\n * //收到了ro_obu_logout_notify(rsu或csu心跳超时注销了obu),重新注册\n * {\n * obu.session_status = DISCONN_AUTO; //根据降级原则,首先降级为DISCONN_AUTO\n * obu.login_status = LOGIN_CSU; //开始不断的注册\n * }\n**/\n\n\n//----------------------------------------------------------\n// rsu上的obu的状态\n//----------------------------------------------------------\n\n//rsu上的obu状态\n#define E_RSU_SESSION_OBU_DISCONN 0 //无网络\n#define E_RSU_SESSION_OBU_CONNECT_HAND 1 //有网络人工驾驶\n#define E_RSU_SESSION_OBU_CONNECT_AUTO 3 //有网络自动驾驶\n\n/**\n * class rsu_session_obu\n * {\n * int session_status; //主状态,枚举值为E_RSU_SESSION_OBU_XXX\n * nad_planning planning; //路径规划,planning.list.size()==0表示无规划\n * int htbt_count; //心跳计数,每次发送rc_info_report则htbt_count++;\n * //每次收到cr_info_report则htbt_count=0,htbt_count>HTBT_TIMEOUT则超时\n * } obu;\n *\n * //每秒处理obu状态\n * if (obu.session_status != DISCONN && obu.htbt_count > HTBT_TIMEOUT) //心跳超时\n * {\n * 发消息rc_obu_logout_notify、ro_obu_logout_notify\n * 删除rsu_session_obu\n * }\n**/\n\n\n//----------------------------------------------------------\n// csu上的obu的状态\n//----------------------------------------------------------\n\n//csu上的obu状态\n#define E_CSU_SESSION_OBU_DISCONN 0 //无网络\n#define E_CSU_SESSION_OBU_CONNECT_HAND 1 //有网络人工驾驶\n#define E_CSU_SESSION_OBU_CONNECT_AUTO 3 //有网络自动驾驶\n\n/**\n * class csu_session_obu\n * {\n * int session_status; //主状态,枚举值为E_RSU_SESSION_OBU_XXX\n * nad_planning planning; //路径规划,planning.list.size()==0表示无规划\n * int htbt_count; //心跳计数,每次发送rc_info_report则htbt_count++;\n * //每次收到cr_info_report则htbt_count=0,htbt_count>HTBT_TIMEOUT则超时\n * } obu;\n *\n * //每秒处理obu状态\n * if (obu.session_status != DISCONN && obu.htbt_count > HTBT_TIMEOUT) //心跳超时\n * {\n * 发消息cr_obu_logout_notify\n * 删除csu_session_obu\n * 
}\n**/\n\n\n//----------------------------------------------------------\n// rsu的状态\n//----------------------------------------------------------\n\n//rsu管理自身的状态机\n#define E_RSU_SESSION_RSU_DISCONN 0 //未连接\n#define E_RSU_SESSION_RSU_CONNECT 1 //已连接\n\n//csu上的rsu状态\n#define E_CSU_SESSION_RSU_DISCONN 0 //未连接\n#define E_CSU_SESSION_RSU_CONNECT 1 //已连接\n\n\n//----------------------------------------------------------\n// ets的枚举值\n//----------------------------------------------------------\n\n/**\n * rsu如果收到ets的er_ets_report消息,rsu就会创建对应的rsu_session_light/rsu_session_limspeed/rsu_session_block\n * 并在每秒上报中通过rc_info_report、ro_vui_report通知到csu、obu,其中csu同理创建session。\n *\n * rsu收到er_ets_report消息时会把rsu_session_light/rsu_session_limspeed/rsu_session_block的htbt_count=0,\n * rsu的一秒定时器会把rsu_session_light/rsu_session_limspeed/rsu_session_block的htbt_count++,\n * 如果htbt_count > HTBT_TIMEOUT,就会删除rsu_session_light/rsu_session_limspeed/rsu_session_block。\n *\n * csu收到rc_info_report会做同样的心跳处理,如果某ets.htbt_count > HTBT_TIMEOUT,删除对应session\n**/\n\n//交通元素类型枚举值\n#define TET_NONE -1 //未知\n#define TET_LANE 0 //车道\n#define TET_LIGHT 10 //红绿灯\n#define TET_LIMSPEED 11 //限速牌\n#define TET_BLOCK 12 //施工标志\n#define TET_CROSSING 20 //路口\nconst char *tet_str(const int val);\n\n //红绿灯取值\n#define LS_NONE 0 //离线\n#define LS_GREEN 1 //绿灯\n#define LS_YELLOW 2 //黄灯\n#define LS_RED 3 //红灯\nconst char *ls_str(const int val);\n\n\n//----------------------------------------------------------\n// rsd的枚举值\n//----------------------------------------------------------\n\n//障碍物类型\n/*\n#define OT_OBU 0 //安装了OBU的联网车辆(包括仅支持ADAS的车)\n#define OT_CAR 1 //没安装OBU的社会车辆\n#define OT_PEOPLE 2 //行人\n#define OT_BLOCK 3 //普通实物单车道施工标志\n#define OT_OTHER 4 //其他障碍物*/\n\n#define OT_OBU 0 //安装了OBU的联网车辆(包括仅支持ADAS的车)\n#define OT_CAR 1 //没安装OBU的社会车辆\n#define OT_PEOPLE 2 //行人\n#define OT_BLOCK 3\n#define OT_OTHER 4\n#define OT_BUS 5\n#define OT_STONE 6\n#define OT_CONE 7\n\nconst char *ot_str(const int val);\n\n//各种障碍物的距离抗抖动\n#define 
SAME_DIST_OBU 2.0 //OT_OBU在1米内认为是同一障碍物\n#define SAME_DIST_CAR 2.0 //OT_OBU和OT_CAR在2米内认为是同一障碍物\n#define SAME_DIST_PEOPLE 1.0 //OT_PEOPLE在1米内认为是同一障碍物\n#define SAME_DIST_BLOCK 1.0 //OT_BLOCK在1米内认为是同一障碍物\n#define SAME_DIST_OTHER 1.0 //OT_OTHER在1米内认为是同一障碍物\n#define SAME_DIST_BUS 2.0 //\n#define SAME_DIST_STONE 1.0\n#define SAME_DIST_CONE 1.0\n\n\n//传感器类型\n#define RSD_SENSOR_CAMERA 0 //摄影头\n#define RSD_SENSOR_LIDAR 1 //激光雷达\n#define RSD_SENSOR_RADAR 2 //毫米波雷达\nconst char *rsd_sensor_str(const int val);\n\n\n//----------------------------------------------------------\n// 路径规划的枚举值\n//----------------------------------------------------------\ntypedef enum\n{\n KP_NONE = 0, //不是关键点(即:拟合点)\n KP_NORMAL, //未赋予关键点类型\n KP_ROUTE_START, //规划的起始点(vui)\n KP_ROUTE_STOP, //规划的结束点(-1~2是永久保留点)(vui)\n KP_CHANGE_LANE_LEFT, //向左换道点\n KP_CHANGE_LANE_RIGHT, //向右换道点\n KP_CO_CHANGE_LANE, //协作:协作式换道(vui)\n KP_CURRENT, //当前车辆位置(vui)\n KP_LIGHT_ACTIVE, //红绿灯激活点\n KP_LIGHT_STOP, //红绿灯停止点(vui)\n KP_LIGHT_EXIT, //红绿灯退出点\n KP_LIMSPEED, //电子限速牌(vui)\n KP_BLOCK, //电子施工标志(vui)\n KP_PARKING, //泊车点\n KP_OT_OBU, //障碍物:安装了OBU的联网车辆(包括仅支持ADAS的车)\n KP_OT_CAR, //障碍物:没安装OBU的社会车辆\n KP_OT_PEOPLE, //障碍物:行人\n KP_OT_BLOCK, //障碍物:普通实物单车道施工标志\n KP_OT_OTHER, //障碍物:其他\n KP_OT_BUS, //障碍物:其他\n KP_OT_STONE, //障碍物:其他\n KP_OT_CONE, //障碍物:其他\n KP_W_SINGLE, //预警:单车道\n KP_W_CHANGE_OFF, //预警:禁止换道\n KP_W_CHANGE_ON, //预警:解除禁止换道\n KP_W_CAMERA_ON, //预警: 打开camera\n KP_W_CAMERA_OFF, //预警: 关闭camera\n KP_W_RADAR_ON, //预警: 打开radar\n KP_W_RADAR_OFF, //预警: 关闭radar\n KP_W_LIMSPEED_OFF, //预警:取消限速\n KP_W_DEGRADE, //预警:降级驾驶 通过warning测试专用\n KP_W_ROAD_LEVEL_1, //道路等级1\n KP_W_ROAD_LEVEL_2, //道路等级2\n KP_W_ROAD_LEVEL_3, //道路等级3\n KP_W_ROAD_LEVEL_4, //道路等级4\n MAX_KP_TYPE\n} ;\n\n/*\n//key_point类型\n#define KP_NONE -1 //不是关键点(即:拟合点)\n#define KP_NORMAL 0 //未赋予关键点类型\n#define KP_ROUTE_START 1 //规划的起始点(vui)\n#define KP_ROUTE_STOP 2 //规划的结束点(-1~2是永久保留点)(vui)\n#define KP_CHANGE_LANE_LEFT 4 //向左换道点\n#define KP_CHANGE_LANE_RIGHT 5 //向右换道点\n#define 
KP_CO_CHANGE_LANE 6 //协作:协作式换道(vui)\n#define KP_CURRENT 10 //当前车辆位置(vui)\n#define KP_LIGHT_ACTIVE 20 //红绿灯激活点\n#define KP_LIGHT_STOP 21 //红绿灯停止点(vui)\n#define KP_LIGHT_EXIT 22 //红绿灯退出点\n#define KP_LIMSPEED 23 //电子限速牌(vui)\n#define KP_BLOCK 24 //电子施工标志(vui)\n#define KP_PARKING 25 //泊车点\n\n#define KP_OT_OBU 30 //障碍物:安装了OBU的联网车辆(包括仅支持ADAS的车)\n#define KP_OT_CAR 31 //障碍物:没安装OBU的社会车辆\n#define KP_OT_PEOPLE 32 //障碍物:行人\n#define KP_OT_BLOCK 33 //障碍物:普通实物单车道施工标志\n#define KP_OT_OTHER 34 //障碍物:其他\n#define KP_OT_BUS 35 //障碍物:其他\n#define KP_OT_STONE 36 //障碍物:其他\n#define KP_OT_CONE 37 //障碍物:其他\n\n#define KP_W_SINGLE 40 //预警:单车道\n#define KP_W_CHANGE_OFF 41 //预警:禁止换道\n#define KP_W_CHANGE_ON 42 //预警:解除禁止换道\n#define KP_W_CAMERA_ON 43 //预警: 打开camera\n#define KP_W_CAMERA_OFF 44 //预警: 关闭camera\n#define KP_W_RADAR_ON 45 //预警: 打开radar\n#define KP_W_RADAR_OFF 46 //预警: 关闭radar\n#define KP_W_LIMSPEED_OFF 47 //预警:取消限速\n#define KP_W_DEGRADE 48 //预警:降级驾驶 通过warning测试专用\n#define KP_W_ROAD_LEVEL_1 49 //道路等级1\n#define KP_W_ROAD_LEVEL_2 50 //道路等级2\n#define KP_W_ROAD_LEVEL_3 51 //道路等级3\n#define KP_W_ROAD_LEVEL_4 52 //道路等级4\n*/\n\nconst char *kp_str(const int val);\n\n\n//----------------------------------------------------------\n// 编队的枚举值\n//----------------------------------------------------------\n\n//修改编队的操作类型\n#define SP_ADD_OBU_TO_TAIL 0 //在队尾追加车辆\n#define SP_DELETE_OBU 1 //删除车辆\n#define SP_SET_SPEED 2 //修改编队整体车速\n#define SP_JOIN_OBU 3 //车队中间插入车辆\nconst char *sp_str(const int val);\n\n//删除编队的操作类型\n#define DPR_ARRIVED 0 //编队整体到达目的地\n#define DPR_DELETE_ALL 1 //除头车外所有车辆都被删除\n#define DPR_OCT_FORCE 2 //OCT为了调试强制解散编队\nconst char *dpr_str(const int val);\n\n\n//----------------------------------------------------------\n// obu的枚举值\n//----------------------------------------------------------\n\n//加速度模式\n#define ACC_P_CONSTANT_SPEED 0 //恒速\n#define ACC_P_CONSTANT_ACC 1 //匀加速\n#define ACC_P_CONSTANT_DECE 2 //匀减速\n#define ACC_P_RAPID_ACC 3 //急加速\n#define ACC_P_RAPID_DECE 4 
//急减速\n\n//一种类型的传感器最大数量,要和message.lcm保持一致\n#define MAX_SENSOR 16\n\n//传感器状态\n#define SENSOR_NONE 0 //传感器不在位\n#define SENSOR_OK 1 //传感器正常\n#define SENSOR_ERR 2 //传感器故障\nconst char *sensor_str(const int val);\n\n//传感器枚举值\n#define IDX_GPS_CENTER 0 //中置GPS组合导航\n#define IDX_INS_CENTER 1 //中置INS组合导航\n#define IDX_CAMERA_1 2 //前视车道线识别单目相机\n#define IDX_CAMERA_2 3 //前视障碍物识别双目相机\n#define IDX_RADAR_ESR 4 //前视远距离毫米波雷达(ESR)\n#define IDX_RADAR_SRR_FL 5 //左前方SRR毫米波雷达\n#define IDX_RADAR_SRR_FR 6 //右前方SRR毫米波雷达\n#define IDX_RADAR_SRR_BL 7 //左后方SRR毫米波雷达\n#define IDX_RADAR_SRR_BR 8 //右后方SRR毫米波雷达\n#define IDX_LIDAR_CENTER 9 //前视远距离激光雷达\n\n//换道原因:单车上报\n#define CL_REASON_HAND 0 //人工拨动转向摇杆要求换道\n#define CL_REASON_OBU_AVOIDING 1 //OBU避障,所以换道\n//换道原因:网络主动生成\n#define CL_REASON_STOP 5 //停车时从高速道换到低速道\n#define CL_REASON_ETS_BLOCK 2 //电子施工标志封闭部分车道,所以换道\n#define CL_REASON_BROKEN_LANE 6 //在断头车道提前换到旁边的车道\n#define CL_REASON_PLATOON_DROP 7 //编队踢掉车辆\n#define CL_REASON_RSU_AVOIDING 8 //RSU指挥OBU避障\n#define CL_REASON_PLATOON_STOP 10 //编队停车时从高速道换到低速道\n#define CL_REASON_PLATOON_JOIN 11 //编队中间加入车辆\nconst char *cl_reason_str(const int val);\nint cl_reason_val(const char *str);\n\n//路径规划的原因\n#define ROUTE_REASON_VUI 0 //在VUI上启动规划\n#define ROUTE_REASON_OBU_RE_PATH 1 //OBU偏离路径,发生道路级重规划\n#define ROUTE_REASON_ADD_PLATOON 2 //OCT创建车队\n#define ROUTE_REASON_ADD_TAIL 3 //在车队尾部追加车辆\n#define ROUTE_REASON_JOIN_OBU 4 //在车队中间追加车辆\n#define ROUTE_REASON_OCT_CALL_CAR 5 //召车\n#define ROUTE_REASON_ETS_BLOCK 6 //施工标志封闭全部道路\n#define ROUTE_REASON_HEAD_CAR 7 //头车更新路径\nconst char *route_reason_str(const int val);\n\n//启动自动驾驶的原因\n#define START_REASON_VUI 0 //在VUI上切换为自动驾驶\n#define START_REASON_CSU 1 //CSU启动自动驾驶(如:脚本)\n#define START_REASON_RSU 2 //RSU启动自动驾驶(暂未使用)\n#define START_REASON_PLATOON 3 //编队启动自动驾驶\n#define START_REASON_CALL_CAR 4 //召车启动自动驾驶\nconst char *start_reason_str(const int val);\n\n//退出自动驾驶的原因\n#define STOP_REASON_VUI 0 //在VUI上切换为人工驾驶\n#define STOP_REASON_CSU 1 //CSU退出自动驾驶(如:脚本)\n#define STOP_REASON_RSU 2 
//RSU退出自动驾驶(如:编队)\n#define STOP_REASON_ARRIVED 3 //到达目的地,obu切换为人工驾驶\n#define STOP_REASON_DEGRADE 4 //降级导致停车\n\nconst char *stop_reason_str(const int val);\n\n//换道方向\n#define CL_DIRECTION_NONE 0 //保持当前车道\n#define CL_DIRECTION_LEFT 1 //向左换道\n#define CL_DIRECTION_RIGHT 2 //向右换道\n#define CL_DIRECTION_BLOCK 3 //被阻死,无法换道\nconst char *cl_direction_str(const int val);\n\n//换道状态\n#define CL_STATUS_REQUEST 0 //单车认为可以换道\n#define CL_STATUS_BLOCKED 1 //单车认为有障碍,需要网络通知相关车辆避让\n#define CL_STATUS_RUNNING 2 //单车正在换道\n#define CL_STATUS_COMPLETE 3 //单车换道完成\n#define CL_STATUS_CANCEL 4 //单车换道过程中被取消\n#define CL_STATUS_CANCEL_COMP 5 //单车换道取消后回本道完成\nconst char *cl_status_str(const int val);\n\n//协作式协作式换道的状态,允许换道后就算协作式换道完成\n#define CLS_NONE 0 //所有车: 没在换道\n#define CLS_WAIT_SPACE 1 //主车: 等待腾出换道空间\n#define CLS_RUNNING 2 //主车: 正在换道或取消换道\n#define CLS_ADD_SPEED 3 //从车: 加速,以便为主车腾出换道空间\n#define CLS_DEC_SPEED 4 //从车: 减速,以便为主车腾出换道空间\n\n//超过多长时间取消协作式换道,毫秒\n#define MAX_CL_TIME (3 * 1000)\n\n//下发中心线的类型\n#define IS_NOT_REPLAN 0 //不重规划\n#define IS_REPLAN 1 //重规划\n\n//车辆默认行为\n#define DRIVE_BEHAVIOR_OVERTAKE 0 //允许车辆自主换道超车。\n#define DRIVE_BEHAVIOR_PFOLLOW 1 //车辆自身只能跟车,网络下发换道超车(默认取本值)。\n#define DRIVE_BEHAVIOR_PHEAD 2 //在车队里面的头车,目前不允许随意换道。\n#define DRIVE_BEHAVIOR_SIMULATE 3 //仿真车辆\n\n//车辆类型\n#define CAR_ACTION_NONE 0 //不允许出现的错误值\n#define CAR_ACTION_SINGLE 1 //非编队车辆\n#define CAR_ACTION_PLATOON_HEAD 2 //编队头车\n#define CAR_ACTION_PLATOON_OTHER 3 //编队跟车\n\n\n//----------------------------------------------------------\n// oct、vui的枚举值\n//----------------------------------------------------------\n\n//csu管理oct的状态机\n#define E_CSU_SESSION_OCT_DISCONN 0 //未连接\n#define E_CSU_SESSION_OCT_CONNECT 1 //已连接\n\n//obu管理vui的状态机\n#define E_OBU_SESSION_VUI_DISCONN 0 //未连接\n#define E_OBU_SESSION_VUI_CONNECT 1 //已连接\n\n//日志等级\n#define LOG_INFO 0 //普通日志\n#define LOG_WARNING 1 //警告日志\n#define LOG_ERROR 2 //错误日志\n#define LOG_FATAL 3 //致命错误日志\nconst char *log_str(const int val);\n\n//告警等级\n#define ALARM_INFO 0 //提示告警(灰色)\n#define 
ALARM_ADVISE 1 //建议告警(绿色)\n#define ALARM_WARNING 2 //警告告警(黄色)\n#define ALARM_DANGER 3 //危险告警(红色)\nconst char *alarm_str(const int val);\nint alarm_val(const char *str);\n\n//告警类型\n#define ALARM_TYPE_OCCUR 0 //产生告警(在VUI开始显示)\n#define ALARM_TYPE_RESTORE 1 //恢复告警(在VUI停止显示)\n#define ALARM_TYPE_EVENT 2 //事件告警(在VUI显示5秒后自动隐藏)\n\n//告警展示类型\n#define ALARM_SHOW_TEXT 0 //文本\n#define ALARM_SHOW_SOUND 1 //语音\n#define ALARM_SHOW_TEXT_SOUND 2 //文本加语音\n\n//产生告警的距离\n#define ALARM_DIS_LONG 100 // <=100米\n#define ALARM_DIS_MIDDLE 50 // <=50米\n#define ALARM_DIS_SHORT 20 // <=20米\n\n//修改ETS的原因\n#define ETS_REASON_DIALOG 0 //通过对话框修改\n#define ETS_REASON_TASK 1 //通过脚本修改\n#define ETS_REASON_CROSSING 2 //通过路口算法修改\nconst char *ets_reason_str(const int val);\n\n\n//----------------------------------------------------------\n// 车辆CAN的枚举值\n//----------------------------------------------------------\n\n//发动机状态\n#define ENGINE_STATUS_STOPPED 0 //停止\n#define ENGINE_STATUS_STALLED 1 //熄火\n#define ENGINE_STATUS_RUNNING 2 //运行\n#define ENGINE_STATUS_CRANKING 3 //启动\n\n//档位状态\n#define AT_STATUS_OFF 255\n#define AT_STATUS_P 5\n#define AT_STATUS_R 7\n#define AT_STATUS_N 0\n#define AT_STATUS_D 4\n#define AT_STATUS_M 8\n#define AT_STATUS_M1 9\n#define AT_STATUS_M2 10\n#define AT_STATUS_U 6\nconst char *at_status_str(const int val);\n\n//刹车信号\n#define BRAKE_STATUS_OFF 0 //未刹车\n#define BRAKE_STATUS_ON 1 //正在刹车\n\n//转向灯状态\n#define FLASHING_STATUS_NONE 0 //不亮\n#define FLASHING_STATUS_L 1 //左转向\n#define FLASHING_STATUS_R 2 //右转向\n#define FLASHING_STATUS_LR 3 //双闪\n\n//灯光状态\n#define BEAM_STATUS_OFF 0 //关灯\n#define BEAM_STATUS_ON 1 //开灯\n\n\n//----------------------------------------------------------\n// GPS、惯导的枚举值\n//----------------------------------------------------------\n\n//定位定姿状态,源自POS状态量的高2位\n#define GPS_FLAG_MA 0 //机械编排\n#define GPS_FLAG_KEEP 0 //保存整秒时刻状态\n#define GPS_FLAG_LC_BEGIN 1 //组合更新计算开始\n#define GPS_FLAG_LC_END 2 //组合解算完成\n\n//RTK状态,源自POS状态量的低6位\n#define GPS_N_SPP 0 //单点定位(纯靠惯导)\n#define 
GPS_N_FLOAT 1 //浮动解(300cm)\n#define GPS_N_FIXED 2 //固定解,精度最佳(20cm)\n#define GPS_N_NG 3 //失败解(纯靠惯导)\n#define GPS_N_STATIC 4 //静态单点定位(纯靠惯导)\n\n\n//----------------------------------------------------------\n// 编队枚举值\n//----------------------------------------------------------\n\n//车辆或编队的状态\n#define PLATOON_CONVERGE 0 //各个obu正在汇聚,未形成编队\n#define PLATOON_FIN 1 //obu已经在编队中\n#define PLATOON_ADD_TAIL 2 //obu尾部加入编队\n#define PLATOON_DROP_OUT_PRE 3 //obu准备退出编队\n#define PLATOON_DROP_OUT 4 //obu正在退出编队\n#define PLATOON_JOIN_PRE 5 //obu准备中间加入编队\n#define PLATOON_JOIN 6 //obu正在中间加入编队\n#define VAHICLE_FREE 7 //obu是自由车辆\n\n//调用汽院的PlatoonControl的返回值\n#define PLATOON_RET_ERR -1 //当前状态错误\n#define PLATOON_RET_DOING 0 //当前状态未执行完\n#define PLATOON_RET_OK 1 //执行完了\n\n\n//----------------------------------------------------------\n// VUI向上调用的枚举值\n//----------------------------------------------------------\n\n//命令名称\n#define UPCALL_UC_ADD_PLATOON_REQUEST 0 //创建编队\n#define UPCALL_UC_SET_PLATOON_REQUEST 1 //修改编队\n#define UPCALL_UC_DELETE_PLATOON_REQUEST 2 //解散编队\n#define UPCALL_UC_CALL_CAR_REQUEST 3 //召车\nconst char *upcall_str(const int val);\nbool is_my_upcall(const int val);\n\n\n//----------------------------------------------------------\n// 协助式换道角色的枚举值\n//----------------------------------------------------------\n#define COOPERATE_NONE 0 //非协助式车辆\n#define COOPERATE_MAIN 1 //请求协助换道的车辆\n#define COOPERATE_FRONT 2 //辅助协助换道前方车辆\n#define COOPERATE_BACK 3 //辅助协助换道后方车辆\n//#define COOPERATE_DONE 4 //辅助协助换道成功后保持\n\n\n//----------------------------------------------------------\n// 其他枚举值\n//----------------------------------------------------------\n\n//坐标转换\nconst double pi = 3.14159265358979324;\nconst double a = 6378245.0;\nconst double ee = 0.00669342162296594323;\nconst double x_pi = 3.14159265358979324 * 3000.0 / 180.0;\n\n//障碍物上报只有5秒的生命周期,如果5秒后不再报这个障碍物,就认为移除了\n#define MAX_OBSTACLE_TTL 
5000\n\n//----------------------------------------------------------\n//降级驾驶相关\n//----------------------------------------------------------\n\n//车辆降级状态\n#define DG_ST_NORMAL 0\n#define DG_ST_CONTINUE 1\n#define DG_ST_STOPCAR 2\n\n//降级原因\n#define DG_RTK_DOWN 1\n#define DG_ROAD_LIMIT 2\n#define DG_OTHER 3\n#define DG_CONTROL 4\n\n//查找路径的方式\n#define TASK_FR_NORMAL 0\n#define TASK_FR_CALLCAR 1\n#define TASK_FR_BLOCK_A 2//block导致的重规划\n#define TASK_FR_BLOCK_B 3\n\n//controller上报的EPB的状态,\n#define EPB_ENABLE 1 //表示EPB拉起来,刹车\n#define EPB_DISABLE 0 //表示EPB没有拉起来,没有刹车\n#endif\n"
},
{
"alpha_fraction": 0.503311276435852,
"alphanum_fraction": 0.5119205117225647,
"avg_line_length": 18.113924026489258,
"blob_id": "547cf00ced248a652b43848ef84f9d0ce1fd7d57",
"content_id": "22cc14ddeef3126205a75edfb70ce5ac1c83535a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1930,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 79,
"path": "/athena/core/x86/Common/include/distributed_runtime/starter/nad_starter.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_starter.h\n * 时 间:2016-03-02\n * 描 述:所有进程的公共启动器基类\n-------------------------------------------------------*/\n#ifndef _NAD_STARTER_H\n#define _NAD_STARTER_H\n\n#include \"config/nad_config.h\"\n\n//每个进程公共的启动程序\nclass nad_starter\n{\nprivate:\n\npublic:\n nad_config *m_config; //配置文件\n\npublic:\n //构造析构函数\n nad_starter();\n virtual ~nad_starter();\n\n //设置退出信号陷阱\n void set_grace_stop();\n\n //退出进程\n void do_grace_stop();\n\n //处理参数\n virtual void do_arg(int argc, char *argv[]);\n\n //显示一些启动信息\n void do_show();\n\n //配置初始化\n int config_init();\n\n //退出配置\n void config_free();\n\n //获取配置\n nad_config* get_config();\n\n /**\n * 在start()中: 执行系统初始化,调用user_start(),然后进入系统运行期的死循环\n * 在stop()中: 调用user_stop(),执行系统的退出操作,然后退出程序\n * 所以用户进程通常需要:\n * 1.定义自己的启动器类\n * class obu_planning_starter : public nad_start\n * {\n * int user_start() {...}\n * void user_stop() {...}\n * }\n * 2.进程的main函数通常是这样:\n * int main()\n * {\n * nad_starter *starter = new obu_planning_starter();\n * starter->start(); //程序启动后在此函数内死循环运行\n * starter->stop();\n * }\n */\n virtual int start(int argc, char *argv[]);\n virtual void stop();\n\npublic:\n //需要进程自己实现的函数\n virtual int user_start() = 0; //启动进程私有处理\n virtual void user_stop() = 0; //退出进程私有处理\n\n //同步时间\n virtual void set_timer() = 0;\n};\n\n//收到ctrl+c后软退出系统\nextern int grace_stop;\n\n\n#endif\n"
},
{
"alpha_fraction": 0.5773195624351501,
"alphanum_fraction": 0.6659793853759766,
"avg_line_length": 22.658536911010742,
"blob_id": "e68921bbd571d1c1cd69396a69b887bbec059fbb",
"content_id": "e715715971d77e7fddc6ba8a9e76d00412b3d5c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 1190,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 41,
"path": "/athena/cc/camera/lane_detect/line_config.ini",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#组播地址\nlcm_url = udpm://239.255.76.22:7622?ttl=3\n#Camera参数文件\n#银色AX7 pointgrey相机:camera_720P_sliver_pointgrey.ini\n#银色AX7 basler相机:camera_720P_sliver_basler.ini\n#2号车basler相机:camera_720P_2_basler.ini \n#mini相机:camera_720P_sliver_mini.ini \ncamera_ini =camera_720P_sliver_pointgrey.ini\n#本地photo路径\nlocal_photo_path =/home/dhx/log/LOG/Photo\n#local_photo_path = /media/nvidia/e59fb8f4-dd4c-4b45-8614-54f8739aea2b/photo\n#程序运行方式 1.读取本地图片 2.读取basler相机 3.读取pointgery相机 4.mini相机\nrun_mode = 1;\n#是否写文件\nwrite_file = 0;\n#是否标定\ncalibration = 0;\n#是否现实鸟瞰图\nshowRoadImage =0;\n#wait_key\nwait_key = 0;\n#垂直方向mm/pix\nm_per_pix_i = 56.98;\n#水平方向mm/pix\nm_per_pix_j = 41.07;\n#basler亮度\nbasler_brightness = 0.4 \n#pointgrey亮度\npt_brightness = 1.5 \n#曝光时间\npt_shutter = 50 \n#增益\npt_gain = 10.5 \n#Gamma\npt_gamma 1.5 \n#白平衡的红色通道\npt_white_balanceA = 500 \n#白平衡的蓝色通道\npt_white_balanceB = 850 \n#mini摄像头读取序列号 \nnumber = 1\n"
},
{
"alpha_fraction": 0.6837209463119507,
"alphanum_fraction": 0.6976743936538696,
"avg_line_length": 29.571428298950195,
"blob_id": "5e45721c840da77e46ee1395d75c35c607be4d10",
"content_id": "79ae9abdeaa8e94ee19772d26089739f0ed4d638",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 295,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 7,
"path": "/athena/core/x86/Camera/lane_detect/readme.txt",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "lanelibTest: 使用so库检测车道线的example\ndoxgen: API文档 (打开index.html文件)\nAPI工具包: \ncamera_720P_sliver_pointgrey.ini 相机标定文件\nline_config.ini \t 程序配置文件\nliblanedetect_lib.so so动态链接库\nlane_utils.h\t\t\t 头文件\n\n"
},
{
"alpha_fraction": 0.7264150977134705,
"alphanum_fraction": 0.7264150977134705,
"avg_line_length": 12.125,
"blob_id": "87b4174f8cc0df2ed6e61b5b0c503d08fb2131f6",
"content_id": "b522cd4cf155c8ecdb14f9b8224655c1ff7b89e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 106,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 8,
"path": "/athena/core/x86/Map/include/RoadMap.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _ROAD_MAP_H\n#define _ROAD_MAP_H\n\n#include \"LaneletMap.hpp\"\"\n\ntypedef LaneletMap RoadMap;\n\n#endif\n\n"
},
{
"alpha_fraction": 0.5177415609359741,
"alphanum_fraction": 0.5485382676124573,
"avg_line_length": 25.993976593017578,
"blob_id": "721c3109c11dd2e018771c29fb7f845a276029a2",
"content_id": "8df47141cbf58e604d9bcc0edce8c40bdf96d42f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 5825,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 166,
"path": "/athena/docs/README.md",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "# 欢迎来到Athena\nAthena(雅典娜)是环宇智行推出的开放的、轻巧灵活和高可靠的自动驾驶软件平台。Athena 1.0包含地图、导航、感知、规划、控制、仿真平台和手机app等模块,支持完整的单车自动驾驶。Athena 2.0将加入网联自动驾驶部分。 \n接下来将介绍Athena的软件架构以及如何在个人pc或环宇的自动驾驶硬件平台——titan3上使用我们的Athena系统。\n\n<!-- TOC -->\n\n- [欢迎来到Athena](#欢迎来到athena)\n - [1. 软件架构](#1-软件架构)\n - [1.1. 核心库](#11-核心库)\n - [1.2. 实例](#12-实例)\n - [2. 使用教程](#2-使用教程)\n - [2.1. 系统要求](#21-系统要求)\n - [2.2. 安装依赖库](#22-安装依赖库)\n - [2.3. 拷贝动态库文件](#23-拷贝动态库文件)\n - [2.4. 运行仿真程序](#24-运行仿真程序)\n - [2.5. 运行自动驾驶程序](#25-运行自动驾驶程序)\n - [2.5.1. x86核下运行](#251-x86核下运行)\n - [2.5.2. arm核下运行](#252-arm核下运行)\n\n<!-- /TOC -->\n\n## 1. 软件架构\n```\n.\n|-- README.md\n|-- athena\n| |-- install_arm.sh\n| |-- install_x86.sh\n| |-- cc\n| | |-- camera\n| | |-- planning\n| |-- core\n| | |-- arm\n| | |-- x86\n| |-- docs\n| | |-- readme\n| |-- examples\n| | |-- readme\n| | |-- LCM\n| | |-- ROS\n| | |-- titan3\n| |-- python\n| |-- readme\n|-- data\n| |-- readme\n|-- third_party\n| |-- glog-master.zip\n| |-- install.sh\n| |-- lcm-1.3.1.zip\n| |-- libsodium-1.0.3.tar.gz\n| |-- readme\n| |-- zeromq-4.1.2.tar.gz\n|-- tools\n |-- readme\n```\n\n<div align=center><img src=\"images/athena.jpg\"/></div> \n\n### 1.1. 
核心库\n/athena/core/目录下放置了Athena各模块的核心库文件,包括x86的库文件(/athena/core/x86)和arm的库文件(/athena/core/arm)。 \n以arm/Planning为例,m目录结果如下:\n```\n.\n|-- Planning\n |-- include\n | |-- collision_check\n | | |-- collision_check.h\n | |-- common\n | | |-- LocalGeographicCS.hpp\n | | |-- car_state.h\n | | |-- color_util.h\n | | |-- convert_coordinates.hpp\n | | |-- cs.h\n | | |-- enum_list.h\n | | |-- math_util.h\n | | |-- navi_point.h\n | | |-- path.h\n | | |-- path_tools.h\n | | |-- point.h\n | | |-- rect.h\n | |-- map_matching\n | | |-- map_matching.h\n | |-- park\n | | |-- park.h\n | |-- planning\n | | |-- planning.h\n | | |-- planning_output.h\n | | |-- planning_param.h\n | | |-- route_data.h\n | |-- spline\n | | |-- math_tools.h\n | | |-- quartic_spline.h\n | | |-- quintic_spline.h\n | | |-- spline.h\n | |-- trajectory\n | | |-- trajectory.h\n | | |-- trajectory_sets.h\n | |-- vehicle_dynamic\n | |-- cau_heading_steering.h\n | |-- circle.h\n | |-- heading.h\n | |-- nearest_point_on_spline.h\n | |-- steering_angle.h\n |-- lib\n |-- libplanning.so\n```\ninclude文件夹下放置里所以头文件,lib文件夹下为arm核的动态库文件。 \n \n### 1.2. 实例\n/athena/examples/目录下放置了基于Athena核心库编写的实例,目前我们提供了基于LCM框架编写的x86核的实例(/athena/examples/LCM)以及适配与titan3控制器的实例(/athena/examples/titan3)。 \n\n## 2. 使用教程\n### 2.1. 系统要求\nubuntu系统\n### 2.2. 安装依赖库\n<table><tr><td bgcolor=#D1EEEE>cd third_party/</td></tr></table> \n<table><tr><td bgcolor=#D1EEEE>install.sh</td></tr></table> \n\n<font color=#A52A2A>推荐安装的第三方库的版本:</font> \nlcm : 1.3.1 \nzeromq : 4.1.2 \nlibsodium : 1.0.3 \n\n### 2.3. 拷贝动态库文件\n进入目录\n<table><tr><td bgcolor=#D1EEEE>cd athena/</td></tr></table> \n如果是在x86核上使用,运行脚本install_x86.sh \n<table><tr><td bgcolor=#D1EEEE>./install_x86.sh</td></tr></table> \n如果是在arm核上使用,运行脚本install_arm.sh \n<table><tr><td bgcolor=#D1EEEE>./install_arm.sh</td></tr></table>\n\n### 2.4. 
运行仿真程序\n仿真平台需要在x86核ubuntu系统下运行 \n<table><tr><td bgcolor=#D1EEEE>cd athena/examples/LCM/SIM/Unity_Sim/</td></tr></table> \n运行run \n\n<div align=center><img src=\"images/run_unity.png\"/></div> \n \n选择分辨率和画面质量,点击OK \n\n<div align=center><img src=\"images/unity.png\"/></div> \n\n### 2.5. 运行自动驾驶程序\n#### 2.5.1. x86核下运行\n<table><tr><td bgcolor=#D1EEEE>cd athena/examples/LCM/Singlecar/launch/bin/</td></tr></table> \n启动程序:运行脚本 \n<table><tr><td bgcolor=#D1EEEE>./launch.sh</td></tr></table> \n找到sim_vui的终端,在终端界面上点击键盘:d/e/f/g/i/j/分别选择不同的目的地:起点/站点A/站点B/站点C/站点D/终点/ \n \n<div align=center><img src=\"images/sim_vui.png\"/></div> \n \n程序规划出到达所选目的地的最优路线,并控制车辆按规划出的路径行驶 \n\n<div align=center><img src=\"images/launch.gif\"/></div> \n \n关闭程序:运行脚本 \n<table><tr><td bgcolor=#D1EEEE>./ds.sh</td></tr></table> \n\n#### 2.5.2. arm核下运行\n<table><tr><td bgcolor=#D1EEEE>cd athena/examples/titan3/launch/bin/</td></tr></table> \n运行启动脚本 \n<table><tr><td bgcolor=#D1EEEE>./launch.sh</td></tr></table> \n其他步骤与x86核下一致 \n<div align=center><img src=\"images/titan3.gif\"/></div> \n \n<font color=#A52A2A>注意:本系统支持分布式运行,如果仿真程序和自动驾驶程序不在同一台机器上运行(如仿真程序运行在个人电脑上,自动驾驶程序运行在titan3控制器上),只需保证不同机器连接在同一局域网内即可!</font>\n"
},
{
"alpha_fraction": 0.6723110675811768,
"alphanum_fraction": 0.6776418685913086,
"avg_line_length": 24.511999130249023,
"blob_id": "34278a8051574e84b13d62662a9b92020b34cb7a",
"content_id": "92f1cf50761ead13a43ebc0d25c6c90b9bdc1112",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3197,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 125,
"path": "/athena/examples/LCM/Singlecar/control/apps/control_view/control_view.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file control_view.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef APPS_CONTROL_VIEW_CONTROL_VIEW_H_\n#define APPS_CONTROL_VIEW_CONTROL_VIEW_H_\n\n#include <GL/glu.h>\n#include <GL/glut.h>\n\n//#include \"controller.h\"\n#include \"../../control_logic/control_logic.h\"\n\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class ControlView\n * @brief 调试窗口.\n */\nclass ControlView{\n public:\n #define KEY_ESC 27\n ControlView() = default;\n ~ControlView() = default;\n\n static void DrawPath();\n\n static void DrawText(float x, float y, float z, char * outputstring);\n static void DrawTextRGB(float x, float y, float z, float r, float g, float b, char * outputstring);\n\n static void OpenGL_Draw();\n\n static void MyGLDispIni();\n static void myDisplay();\n\n static void SpecialKey(int key, int x, int y);\n static void MouseKey(int button, int state, int x, int y);\n static void MouseRotate(int x, int y, int z);\n static void PassiveMouseMove(int x, int y);\n static void MouseMove(int x, int y);\n static void Reshape(int w, int h);\n static void Draw_Best_March_Point(path& p, map_matching& matching);\n static void Key(unsigned char key, int x, int y);\n\n static void Init(ControlLogic*control_logic,double vehicle_width);\n\n\n static void Draw_Org();\n static void Draw_Point(double x, double y);\n static void DrawCar(double x, double y, double yaw, double steer_angle );\n static void DrawCar_e(double x, double y, double x_c, double y_c, double yaw, double yaw_c);\n\n static void Draw_Virtual_lane(path& v_p,int output_st_pos,int output_en_pos,int every,int r, int g, int b);\n static void Draw_Best_March_Point(path& p, int no_points);\n\n private:\n static int x_lbefore;\n static int y_lbefore;\n static int x_rbefore;\n static int y_rbefore;\n static int z_before1;\n static int z_before2;\n\n static bool 
buttonSaveLeft;\n static bool buttonSaveMiddle;\n static bool buttonSaveRight;\n static float x_move;\n static float y_move;\n static float z_move;\n static float x_move_save;\n static float y_move_save;\n static float z_move_save;\n static float x_rotate;\n static float y_rotate;\n static float z_rotate;\n static float x_rotate_save;\n static float y_rotate_save;\n static float z_rotate_save;\n static float m_zoom;\n\n static float m_aspect;\n\n static float m_eyex;\n static float m_eyey;\n static float m_eyez;\n static float m_eyeyaw;\n static float m_centerx;\n static float m_centery;\n static float m_centerz;\n static float m_upx;\n static float m_upy;\n static float m_upz;\n static int g_frame;\n static bool g_pause;\n static double front_track;\n static double front_wheel_wide;\n static double back_wheel_wide;\n\n static double vehicle_width_;\n\n static int32_t match_point_num_;\n static double position_x_c;\n static double position_y_c;\n static double yaw_c;\n\n static ControlLogic *control_logic_;\n static DebugOutput debug_output_;\n\n static double tar_speed_debug_;\n};\n}\n}\n\n#endif //APPS_CONTROL_VIEW_CONTROL_VIEW_H_\n"
},
{
"alpha_fraction": 0.6149914860725403,
"alphanum_fraction": 0.6314594149589539,
"avg_line_length": 20.740739822387695,
"blob_id": "6736503010abd24739b5d9062c45b71a2858aa3a",
"content_id": "27c4b062b913e72488df4b81efb6e22c4e90f5eb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1819,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 81,
"path": "/athena/examples/LCM/Singlecar/control/apps/track_trajectory/track_trajectory.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file track_trajectory.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef APPS_TRACK_TRAJECTORY_H_\n#define APPS_TRACK_TRAJECTORY_H_\n\n#include \"common/path.h\"\n#include \"common/math_util.h\"\n#include \"common/map_matching/spline.h\"\n#include \"common/map_matching/heading.h\"\n#include \"common/map_matching//steering_angle.h\"\n#include \"../../control_logic/control_logic.h\"\n\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class Control\n * @brief 控制类.\n */\nclass TrackTrajectory{\n public:\n TrackTrajectory() = default;\n ~TrackTrajectory() = default;\n\n /**\n * @brief 初始化\n * @param[in] local_trajectory_path 本地轨迹路径..\n * @param[in] controller_config 控制器配置.\n * @return void.\n */\n void Init(string local_trajectory_path,ControlLogic *control_logic);\n\n private:\n const double const_speed_ = 8.0;\n const double steering_cau_one_point_ = 2.0;\n const double steering_cau_two_point_ = 4.0;\n const int32_t mark_every_ = 10;\n const double spline_every_ = 0.1;\n const double kp_slope_ = 30.0;\n const double kp_value_ = 2.0;\n const double tar_speed_ = 0;\n path recv_path_;\n ///轨迹\n Trajectory trajectory_;\n ControlLogic *control_logic_;\n\n void CauAllOutputFromSingleSpline(\n path& p,\n int no_points,\n double speed);\n\n\n void CauPathFromSpline(path& p, int no_points);\n\n\n void SplineKp(path& p,double speed,int length );\n\n /**\n * @brief 轨迹转换\n * @param[in] path 本地路径..\n * @param[in] trajectory 轨迹.\n * @return void.\n */\n void PathToTrajectory(path &path,Trajectory &trajectory);\n};\n}\n}\n\n#endif //APPS_TRACK_TRAJECTORY_H_\n"
},
{
"alpha_fraction": 0.6611327528953552,
"alphanum_fraction": 0.6624879837036133,
"avg_line_length": 54.68867874145508,
"blob_id": "3f93b6158554a6487b77d21f373af76761b721a6",
"content_id": "c467a2eb3838f247937b830bbb5714433b6a7d3a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 17963,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 318,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/msg/nad_msg.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_msg.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:引用了msg目录下的所有头文件\n-------------------------------------------------------*/\n#ifndef _NAD_MSG_H\n#define _NAD_MSG_H\n\n//网元内消息\n#include <lcm/lcm-cpp.hpp>\n\n//网元间消息\n#include \"ne_msg/ne_lcm.hpp\"\n\n//网络侧lcm消息列表\nusing namespace nad_lcm;\n\n//定时器\n#include \"timer/nad_timer.h\"\n\n//SESSION(控制块)\n#include \"session/nad_session.h\"\n\n//ZeroMQ,注:hpp文件中有static函数,所以不默认包含\n//#include \"zmq/zmq_t.h\"\n\n//用于发消息\nextern NE_LCM *g_lcm;\n\n\n//nad_lcm中包含的消息文件\n#include \"nad_lcm/block_info.hpp\"\n#include \"nad_lcm/center_line.hpp\"\n#include \"nad_lcm/center_point.hpp\"\n#include \"nad_lcm/lite_center_point.hpp\"\n#include \"nad_lcm/co_rsu_name_respond.hpp\"\n#include \"nad_lcm/cr_add_ets_request.hpp\"\n#include \"nad_lcm/cr_add_platoon_request.hpp\"\n#include \"nad_lcm/cr_delete_ets_request.hpp\"\n#include \"nad_lcm/cr_delete_platoon_request.hpp\"\n#include \"nad_lcm/cr_exec_task_func_request.hpp\"\n#include \"nad_lcm/cr_info_report.hpp\"\n#include \"nad_lcm/cr_obu_login_respond.hpp\"\n#include \"nad_lcm/cr_obu_logout_notify.hpp\"\n#include \"nad_lcm/cr_route_respond.hpp\"\n#include \"nad_lcm/cr_rsu_login_respond.hpp\"\n#include \"nad_lcm/cr_rsu_logout_notify.hpp\"\n#include \"nad_lcm/cr_set_ets_request.hpp\"\n#include \"nad_lcm/cr_set_platoon_request.hpp\"\n#include \"nad_lcm/cr_start_auto_respond.hpp\"\n#include \"nad_lcm/cu_add_ets_respond.hpp\"\n#include \"nad_lcm/cu_add_platoon_respond.hpp\"\n#include \"nad_lcm/cu_alarm_report.hpp\"\n#include \"nad_lcm/cu_config_respond.hpp\"\n#include \"nad_lcm/cu_delete_ets_respond.hpp\"\n#include \"nad_lcm/cu_delete_platoon_notify.hpp\"\n#include \"nad_lcm/cu_exec_task_respond.hpp\"\n#include \"nad_lcm/cu_info_report.hpp\"\n#include \"nad_lcm/cu_log_report.hpp\"\n#include \"nad_lcm/cu_oct_login_respond.hpp\"\n#include \"nad_lcm/cu_set_ets_respond.hpp\"\n#include 
\"nad_lcm/cu_set_platoon_respond.hpp\"\n#include \"nad_lcm/cu_stop_auto_notify.hpp\"\n#include \"nad_lcm/cu_stop_task_respond.hpp\"\n#include \"nad_lcm/dr_info_report.hpp\"\n#include \"nad_lcm/key_point.hpp\"\n#include \"nad_lcm/lane_of_route.hpp\"\n#include \"nad_lcm/light_info.hpp\"\n#include \"nad_lcm/limspeed_info.hpp\"\n#include \"nad_lcm/map_point.hpp\"\n#include \"nad_lcm/obstacle_info.hpp\"\n#include \"nad_lcm/mo_change_lane_request.hpp\"\n#include \"nad_lcm/obu_command.hpp\"\n#include \"nad_lcm/obu_config.hpp\"\n#include \"nad_lcm/obu_info.hpp\"\n#include \"nad_lcm/oc_rsu_name_request.hpp\"\n#include \"nad_lcm/or_change_lane_request.hpp\"\n#include \"nad_lcm/or_info_report.hpp\"\n#include \"nad_lcm/or_obu_login_request.hpp\"\n#include \"nad_lcm/or_route_request.hpp\"\n#include \"nad_lcm/or_start_auto_request.hpp\"\n#include \"nad_lcm/or_stop_auto_notify.hpp\"\n#include \"nad_lcm/om_change_lane_respond.hpp\"\n#include \"nad_lcm/ou_add_platoon_notify.hpp\"\n#include \"nad_lcm/ou_alarm_report.hpp\"\n#include \"nad_lcm/ou_delete_platoon_notify.hpp\"\n#include \"nad_lcm/ou_log_report.hpp\"\n#include \"nad_lcm/ou_route_respond.hpp\"\n#include \"nad_lcm/ou_set_platoon_notify.hpp\"\n#include \"nad_lcm/ou_start_auto_respond.hpp\"\n#include \"nad_lcm/ou_stop_auto_respond.hpp\"\n#include \"nad_lcm/platoon_info.hpp\"\n#include \"nad_lcm/rc_add_ets_respond.hpp\"\n#include \"nad_lcm/rc_add_platoon_respond.hpp\"\n#include \"nad_lcm/rc_alarm_report.hpp\"\n#include \"nad_lcm/rc_delete_ets_respond.hpp\"\n#include \"nad_lcm/rc_delete_platoon_notify.hpp\"\n#include \"nad_lcm/rc_exec_task_func_respond.hpp\"\n#include \"nad_lcm/rc_info_report.hpp\"\n#include \"nad_lcm/rc_log_report.hpp\"\n#include \"nad_lcm/rc_obu_login_request.hpp\"\n#include \"nad_lcm/rc_obu_logout_notify.hpp\"\n#include \"nad_lcm/rc_route_request.hpp\"\n#include \"nad_lcm/rc_route_respond.hpp\"\n#include \"nad_lcm/rc_rsu_login_request.hpp\"\n#include \"nad_lcm/rc_set_ets_respond.hpp\"\n#include 
\"nad_lcm/rc_set_platoon_respond.hpp\"\n#include \"nad_lcm/rc_start_auto_request.hpp\"\n#include \"nad_lcm/rc_stop_auto_notify.hpp\"\n#include \"nad_lcm/route_planning.hpp\"\n#include \"nad_lcm/ro_add_platoon_notify.hpp\"\n#include \"nad_lcm/ro_alarm_report.hpp\"\n#include \"nad_lcm/ro_change_lane_respond.hpp\"\n#include \"nad_lcm/ro_delete_platoon_notify.hpp\"\n#include \"nad_lcm/ro_info_report.hpp\"\n#include \"nad_lcm/ro_log_report.hpp\"\n#include \"nad_lcm/ro_obu_login_respond.hpp\"\n#include \"nad_lcm/ro_obu_logout_notify.hpp\"\n#include \"nad_lcm/ro_route_respond.hpp\"\n#include \"nad_lcm/ro_set_platoon_notify.hpp\"\n#include \"nad_lcm/ro_start_auto_respond.hpp\"\n#include \"nad_lcm/ro_stop_auto_respond.hpp\"\n#include \"nad_lcm/rsd_config.hpp\"\n#include \"nad_lcm/rsd_info.hpp\"\n#include \"nad_lcm/rsd_sensor_info.hpp\"\n#include \"nad_lcm/rsu_config.hpp\"\n#include \"nad_lcm/rsu_info.hpp\"\n#include \"nad_lcm/sensor_obstacle_report.hpp\"\n#include \"nad_lcm/task_config.hpp\"\n#include \"nad_lcm/task_info.hpp\"\n#include \"nad_lcm/uc_add_ets_request.hpp\"\n#include \"nad_lcm/uc_add_platoon_request.hpp\"\n#include \"nad_lcm/uc_config_request.hpp\"\n#include \"nad_lcm/uc_delete_ets_request.hpp\"\n#include \"nad_lcm/uc_delete_platoon_request.hpp\"\n#include \"nad_lcm/uc_exec_task_request.hpp\"\n#include \"nad_lcm/uc_oct_login_request.hpp\"\n#include \"nad_lcm/uc_set_ets_request.hpp\"\n#include \"nad_lcm/uc_set_platoon_request.hpp\"\n#include \"nad_lcm/uc_stop_task_request.hpp\"\n#include \"nad_lcm/uo_route_request.hpp\"\n#include \"nad_lcm/uo_start_auto_request.hpp\"\n#include \"nad_lcm/uo_stop_auto_request.hpp\"\n#include \"nad_lcm/uc_call_car_request.hpp\"\n#include \"nad_lcm/cu_call_car_respond.hpp\"\n#include \"nad_lcm/route_line_point.hpp\"\n#include \"nad_lcm/om_center_line_report.hpp\"\n#include \"nad_lcm/om_info_report.hpp\"\n#include \"nad_lcm/ro_vui_report.hpp\"\n#include \"nad_lcm/ou_vui_report.hpp\"\n#include 
\"nad_lcm/uo_upcall_request.hpp\"\n#include \"nad_lcm/or_upcall_request.hpp\"\n#include \"nad_lcm/rc_upcall_request.hpp\"\n#include \"nad_lcm/co_obu_info.hpp\"\n#include \"nad_lcm/key_point_info.hpp\"\n#include \"nad_lcm/qos_info.hpp\"\n#include \"nad_lcm/ro_sensor_on_off.hpp\"\n#include \"nad_lcm/os_sensor_on_off.hpp\"\n#include \"nad_lcm/ro_qos_request.hpp\"\n#include \"nad_lcm/or_qos_respond.hpp\"\n#include \"nad_lcm/ne_info.hpp\"\n#include \"nad_lcm/mo_degrade_request.hpp\"\n#include \"nad_lcm/or_degrade_request.hpp\"\n#include \"nad_lcm/line_xys.hpp\"\n#include \"nad_lcm/point_m.hpp\"\n#include \"nad_lcm/point_xys.hpp\"\n#include \"nad_lcm/route_planning_m.hpp\"\n#include \"nad_lcm/om_route_respond.hpp\"\n\n#include \"nad_lcm/uc_info_report.hpp\"\n#include \"nad_lcm/cu_call_park_info_report.hpp\"//oct 召车泊车\n#include \"nad_lcm/uc_call_park_car_request.hpp\"//oct 召车泊车\n#include \"nad_lcm/cu_call_park_car_respond.hpp\"//oct 召车泊车\n\n\n#include \"nad_lcm/oc_vui_report.hpp\"\n\n//obu_lcm包含的消息列表\n#include \"obu_lcm/control_info_report.hpp\"\n#include \"obu_lcm/nav_points.hpp\"\n#include \"obu_lcm/lateral_control_info.hpp\"\n#include \"obu_lcm/lateral_control_vui_info.hpp\"\n#include \"obu_lcm/esr_data_list.hpp\"\n#include \"obu_lcm/esr_data_t.hpp\"\n#include \"obu_lcm/ins_info.hpp\"\n#include \"obu_lcm/CAN_value.hpp\"\n#include \"obu_lcm/back_coordinate_XYH.hpp\"\n#include \"obu_lcm/mt_info_report.hpp\"\n//构造消息调试信息\nstring log_rc_rsu_login_request( const nad_lcm::ne_msg_t<nad_lcm::rc_rsu_login_request > *msg);\nstring log_cr_rsu_login_respond( const nad_lcm::ne_msg_t<nad_lcm::cr_rsu_login_respond > *msg);\nstring log_cr_rsu_logout_notify( const nad_lcm::ne_msg_t<nad_lcm::cr_rsu_logout_notify > *msg);\nstring log_oc_rsu_name_request( const nad_lcm::ne_msg_t<nad_lcm::oc_rsu_name_request > *msg);\nstring log_co_rsu_name_respond( const nad_lcm::ne_msg_t<nad_lcm::co_rsu_name_respond > *msg);\nstring log_or_obu_login_request( const 
nad_lcm::ne_msg_t<nad_lcm::or_obu_login_request > *msg);\nstring log_rc_obu_login_request( const nad_lcm::ne_msg_t<nad_lcm::rc_obu_login_request > *msg);\nstring log_cr_obu_login_respond( const nad_lcm::ne_msg_t<nad_lcm::cr_obu_login_respond > *msg);\nstring log_ro_obu_login_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_obu_login_respond > *msg);\nstring log_cr_obu_logout_notify( const nad_lcm::ne_msg_t<nad_lcm::cr_obu_logout_notify > *msg);\nstring log_rc_obu_logout_notify( const nad_lcm::ne_msg_t<nad_lcm::rc_obu_logout_notify > *msg);\nstring log_ro_obu_logout_notify( const nad_lcm::ne_msg_t<nad_lcm::ro_obu_logout_notify > *msg);\nstring log_uo_route_request( const nad_lcm::uo_route_request *msg);\nstring log_or_route_request( const nad_lcm::ne_msg_t<nad_lcm::or_route_request > *msg);\nstring log_rc_route_request( const nad_lcm::ne_msg_t<nad_lcm::rc_route_request > *msg);\nstring log_cr_route_respond( const nad_lcm::ne_msg_t<nad_lcm::cr_route_respond > *msg);\nstring log_ro_route_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_route_respond > *msg);\nstring log_rc_route_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_route_respond > *msg);\nstring log_ou_route_respond( const nad_lcm::ou_route_respond *msg);\nstring log_uo_start_auto_request( const nad_lcm::uo_start_auto_request *msg);\nstring log_or_start_auto_request( const nad_lcm::ne_msg_t<nad_lcm::or_start_auto_request > *msg);\nstring log_rc_start_auto_request( const nad_lcm::ne_msg_t<nad_lcm::rc_start_auto_request > *msg);\nstring log_cr_start_auto_respond( const nad_lcm::ne_msg_t<nad_lcm::cr_start_auto_respond > *msg);\nstring log_ro_start_auto_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_start_auto_respond > *msg);\nstring log_ou_start_auto_respond( const nad_lcm::ou_start_auto_respond *msg);\nstring log_uo_stop_auto_request( const nad_lcm::uo_stop_auto_request *msg);\nstring log_or_stop_auto_notify( const nad_lcm::ne_msg_t<nad_lcm::or_stop_auto_notify > *msg);\nstring log_rc_stop_auto_notify( const 
nad_lcm::ne_msg_t<nad_lcm::rc_stop_auto_notify > *msg);\nstring log_cu_stop_auto_notify( const nad_lcm::ne_msg_t<nad_lcm::cu_stop_auto_notify > *msg);\nstring log_ro_stop_auto_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_stop_auto_respond > *msg);\nstring log_ou_stop_auto_respond( const nad_lcm::ou_stop_auto_respond *msg);\nstring log_dr_info_report( const nad_lcm::ne_msg_t<nad_lcm::dr_info_report > *msg);\nstring log_or_info_report( const nad_lcm::ne_msg_t<nad_lcm::or_info_report > *msg);\nstring log_rc_info_report( const nad_lcm::ne_msg_t<nad_lcm::rc_info_report > *msg);\nstring log_cu_info_report( const nad_lcm::ne_msg_t<nad_lcm::cu_info_report > *msg);\nstring log_cr_info_report( const nad_lcm::ne_msg_t<nad_lcm::cr_info_report > *msg);\nstring log_mo_change_lane_request( const nad_lcm::mo_change_lane_request *msg);\nstring log_om_center_line_report( const nad_lcm::om_center_line_report *msg);\nstring log_or_change_lane_request( const nad_lcm::ne_msg_t<nad_lcm::or_change_lane_request > *msg);\nstring log_ro_change_lane_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_change_lane_respond > *msg);\nstring log_om_change_lane_respond( const nad_lcm::om_change_lane_respond *msg);\nstring log_uc_add_platoon_request( const nad_lcm::uc_add_platoon_request *msg);\nstring log_cr_add_platoon_request( const nad_lcm::ne_msg_t<nad_lcm::cr_add_platoon_request > *msg);\nstring log_ro_add_platoon_notify( const nad_lcm::ne_msg_t<nad_lcm::ro_add_platoon_notify > *msg);\nstring log_ou_add_platoon_notify( const nad_lcm::ou_add_platoon_notify *msg);\nstring log_rc_add_platoon_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_add_platoon_respond > *msg);\nstring log_cu_add_platoon_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_add_platoon_respond > *msg);\nstring log_uc_set_platoon_request( const nad_lcm::uc_set_platoon_request *msg);\nstring log_cr_set_platoon_request( const nad_lcm::ne_msg_t<nad_lcm::cr_set_platoon_request > *msg);\nstring log_ro_set_platoon_notify( const 
nad_lcm::ne_msg_t<nad_lcm::ro_set_platoon_notify > *msg);\nstring log_ou_set_platoon_notify( const nad_lcm::ou_set_platoon_notify *msg);\nstring log_rc_set_platoon_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_set_platoon_respond > *msg);\nstring log_cu_set_platoon_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_set_platoon_respond > *msg);\nstring log_uc_delete_platoon_request(const nad_lcm::uc_delete_platoon_request *msg);\nstring log_cr_delete_platoon_request(const nad_lcm::ne_msg_t<nad_lcm::cr_delete_platoon_request> *msg);\nstring log_rc_delete_platoon_notify( const nad_lcm::ne_msg_t<nad_lcm::rc_delete_platoon_notify > *msg);\nstring log_cu_delete_platoon_notify( const nad_lcm::ne_msg_t<nad_lcm::cu_delete_platoon_notify > *msg);\nstring log_ro_delete_platoon_notify( const nad_lcm::ne_msg_t<nad_lcm::ro_delete_platoon_notify > *msg);\nstring log_ou_delete_platoon_notify( const nad_lcm::ou_delete_platoon_notify *msg);\nstring log_uc_oct_login_request( const nad_lcm::uc_oct_login_request *msg);\nstring log_cu_oct_login_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_oct_login_respond > *msg);\nstring log_uc_config_request( const nad_lcm::uc_config_request *msg);\nstring log_cu_config_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_config_respond > *msg);\nstring log_uc_exec_task_request( const nad_lcm::uc_exec_task_request *msg);\nstring log_cu_exec_task_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_exec_task_respond > *msg);\nstring log_uc_stop_task_request( const nad_lcm::uc_stop_task_request *msg);\nstring log_cu_stop_task_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_stop_task_respond > *msg);\nstring log_cr_exec_task_func_request(const nad_lcm::ne_msg_t<nad_lcm::cr_exec_task_func_request> *msg);\nstring log_rc_exec_task_func_respond(const nad_lcm::ne_msg_t<nad_lcm::rc_exec_task_func_respond> *msg);\nstring log_uc_set_ets_request( const nad_lcm::uc_set_ets_request *msg);\nstring log_cr_set_ets_request( const nad_lcm::ne_msg_t<nad_lcm::cr_set_ets_request > *msg);\nstring 
log_rc_set_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_set_ets_respond > *msg);\nstring log_cu_set_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_set_ets_respond > *msg);\nstring log_uc_add_ets_request( const nad_lcm::uc_add_ets_request *msg);\nstring log_cr_add_ets_request( const nad_lcm::ne_msg_t<nad_lcm::cr_add_ets_request > *msg);\nstring log_rc_add_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_add_ets_respond > *msg);\nstring log_cu_add_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_add_ets_respond > *msg);\nstring log_uc_delete_ets_request( const nad_lcm::uc_delete_ets_request *msg);\nstring log_cr_delete_ets_request( const nad_lcm::ne_msg_t<nad_lcm::cr_delete_ets_request > *msg);\nstring log_rc_delete_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_delete_ets_respond > *msg);\nstring log_cu_delete_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_delete_ets_respond > *msg);\nstring log_rc_log_report( const nad_lcm::ne_msg_t<nad_lcm::rc_log_report > *msg);\nstring log_cu_log_report( const nad_lcm::ne_msg_t<nad_lcm::cu_log_report > *msg);\nstring log_rc_alarm_report( const nad_lcm::ne_msg_t<nad_lcm::rc_alarm_report > *msg);\nstring log_cu_alarm_report( const nad_lcm::ne_msg_t<nad_lcm::cu_alarm_report > *msg);\nstring log_ro_log_report( const nad_lcm::ne_msg_t<nad_lcm::ro_log_report > *msg);\nstring log_ou_log_report( const nad_lcm::ou_log_report *msg);\nstring log_ro_alarm_report( const nad_lcm::ne_msg_t<nad_lcm::ro_alarm_report > *msg);\nstring log_ou_alarm_report( const nad_lcm::ou_alarm_report *msg);\nstring log_sensor_obstacle_report( const nad_lcm::sensor_obstacle_report *msg);\nstring log_uc_call_car_request( const nad_lcm::uc_call_car_request *msg);\nstring log_cu_call_car_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_call_car_respond > *msg);\nstring log_uo_upcall_request( const nad_lcm::uo_upcall_request *msg);\nstring log_or_upcall_request( const nad_lcm::ne_msg_t<nad_lcm::or_upcall_request > *msg);\nstring log_rc_upcall_request( const 
nad_lcm::ne_msg_t<nad_lcm::rc_upcall_request > *msg);\n\n//oct新增消息\nstring log_uc_call_park_car_request( const nad_lcm::uc_call_park_car_request *msg);\nstring log_cu_call_park_info_report( const nad_lcm::ne_msg_t<nad_lcm::cu_call_park_info_report > *msg);\nstring log_cu_call_park_car_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_call_park_car_respond > *msg);\n\n#if defined(_NAD_CSU_)\n #define MSG_CAP \"CSU\"\n#endif\n#if defined(_NAD_RSU_)\n #define MSG_CAP \"RSU\"\n#endif\n#if defined(_NAD_OBU_)\n #define MSG_CAP \"OBU\"\n#endif\n#if defined(_NAD_RSD_)\n #define MSG_CAP \"RSD\"\n#endif\n#if defined(_NAD_SIM_)\n #define MSG_CAP \"SIM\"\n#endif\n\n/* //写日志的输出法\n#define LOG_RECV(str) LOG(WARNING) << MSG_CAP << \"_recv: \" << str\n#define LOG_SEND(str) LOG(ERROR) << MSG_CAP << \"_send: \" << str\n*/\n\n//不写日志的输出法\n#define LOG_RECV(str) std::cout << endl << \"\\e[0;32m\" << MSG_CAP << \"_recv: \" << str << \"\\e[0m\" << endl\n#define LOG_SEND(str) std::cout << endl << \"\\e[0;34m\" << MSG_CAP << \"_send: \" << str << \"\\e[0m\" << endl\n\n\n#endif\n"
},
{
"alpha_fraction": 0.49529188871383667,
"alphanum_fraction": 0.5254237055778503,
"avg_line_length": 14.171428680419922,
"blob_id": "c3da035796f2df7c0a29348e7837b9c49bc2f560",
"content_id": "4fe8af61fd929006e60215ce5a1c0eefb6f462f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 651,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 35,
"path": "/athena/core/x86/Common/include/base/nad_base.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_base.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:引用了base目录下的所有头文件\n-------------------------------------------------------*/\n#ifndef _NAD_BASE_H\n#define _NAD_BASE_H\n\n\n//公共数据类型\n#include \"nad_type.h\"\n\n//公共返回码\n#include \"nad_retcode.h\"\n\n//公共枚举值\n#include \"nad_enum.h\"\n\n//公共基础函数\n#include \"nad_function.h\"\n\n//日志模块\n#include \"log/nad_glog.h\"\n\n//XML解析器\n#include \"xml/pugixml.hpp\"\nusing namespace pugi;\n\n//数据库\n#include \"db/nad_db.h\"\n\n#include \"timer/nad_timer.h\"\n\n#endif\n"
},
{
"alpha_fraction": 0.708791196346283,
"alphanum_fraction": 0.7280219793319702,
"avg_line_length": 15.906976699829102,
"blob_id": "dbb8c25daec9a654b5a736b7210cf81de947c3f0",
"content_id": "528ad4551d3c6f7a0ab82b59b60564286381a75d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 728,
"license_type": "no_license",
"max_line_length": 30,
"num_lines": 43,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/type.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _TYPE_H_\n#define _TYPE_H_\n\n#include <stdlib.h>\n\ntypedef signed char\t\tSchar;\ntypedef signed short\tSshort;\ntypedef signed int\t\tSint;\ntypedef signed long\t\tSlong;\ntypedef unsigned char\tUchar;\ntypedef unsigned short\tUshort;\ntypedef unsigned int\tUint;\ntypedef unsigned long\tUlong;\ntypedef Ulong COLORREF;\n\ntypedef\tint\tBOOL;\n//enum {FALSE = 0, TRUE};\n\n#define FALSE 0\n#define TRUE 1\n\ntypedef union {\n\tUchar b[4];\n\tUshort w[2];\n\tUlong dw;\n\tfloat fl;\n} LogData;\n\ntypedef char\t\t\tS1;\ntypedef unsigned char\tU1;\ntypedef short\t\t\tS2;\ntypedef unsigned short \tU2;\ntypedef long\t\t\tS4;\ntypedef unsigned long\tU4;\ntypedef void\t\t\tVD;\ntypedef char\t\t\tI1;\ntypedef short\t\t\tI2;\ntypedef long\t\t\tI4;\n\ntypedef\tfloat\t\t\tFL;\ntypedef\tdouble\t\t\tDB;\n\n#endif\n\n"
},
{
"alpha_fraction": 0.6615217328071594,
"alphanum_fraction": 0.6719565391540527,
"avg_line_length": 24.414363861083984,
"blob_id": "7e3b182f4acd8b52d57600043a0d5e73b0536d95",
"content_id": "2a7b85f8e1c8dae43603f3038485feb4bea66d81",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5090,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 181,
"path": "/athena/core/arm/Control/include/controller_agent.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file controller_agent.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef COMMON_CONTROLLER_AGENT_H_\n#define COMMON_CONTROLLER_AGENT_H_\n\n#include \"controller_config.h\"\n#include \"trajectory.h\"\n#include \"chassis.h\"\n#include \"controller_output.h\"\n#include \"controller_alarm_code.h\"\n#include \"controller_output_alarm.h\"\n#include \"generic_controller.h\"\n#include \"localization.h\"\n#include \"local_localization.h\"\n#include \"common/map_matching/map_matching.h\"\n#include \"lon_controller/lon_controller.h\"\n#include \"lat_controller/lat_controller.h\"\n#include \"lqr_controller/lqr_lat_controller.h\"\n#include \"debug_output.h\"\n#include \"common/map_matching/coordinate_transformation.h\"\n\n\n/**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\nclass ControllerAgent{\npublic:\n ControllerAgent() = default;\n ~ControllerAgent() = default;\n\n/**\n * @brief initialization.\n * @param[in] controller_config controller config.\n * @return true or false.\n */\n bool Init(const ControllerConfig controller_config_);\n\n/**\n * @brief SetTrajectory.\n * @param[in] trajectory trajectory information.\n * @return void.\n */\n void SetTrajectory(const Trajectory *trajectory);\n\n/**\n * @brief ComputeControlOutput.\n * @param[in] localiation 定位信息.\n * @param[in] chassis 车辆底盘信息.\n * @param[in] controller_output 控制器输出.\n * @return true or false.\n */\n bool ComputeControlOutput(const Localization * localiation,const Chassis *chassis,ControllerOutput * controller_output);\n\n/**\n * @brief GetControllerInfo.\n * @param[in] debug_output 调试输出.\n * @return void.\n */\n void GetControllerInfo(DebugOutput &debug_output);\n\n/**\n * @brief SetDrivingModeDebug 设置驾驶模式用作调试.\n * @param[in] mode 0 无效 1 人工驾驶 3 自动驾驶.\n * @return void.\n */\n void SetDrivingModeDebug(int32_t mode);\n\n/**\n * @brief SetTarSpeedDebug 设置推荐速度.\n * 
@param[in] tar_speed 推荐速度.\n * @param[in] valid 是否有效.\n * @return void.\n */\n void SetTarSpeedDebug(int32_t tar_speed,bool valid);\n\n/**\n * @brief GetAlarmInfo 获取报警信息.\n * @param[out] alarm_list 报警信息.\n * @return void.\n */\n void GetAlarmTableInfo(std::vector <ControllerOutputAlarm::AlarmInfoTable> *alarm_list);\n\n\nprivate:\n ///报警\n ControllerOutputAlarm *controller_output_alarm;\n ///地图点最小限定:30,低于30个点(3米)则不匹配该地图\n const int MAP_POINT_LIM_MIN = 30;\n ///地图点最大限定:10000,(5公里)则不匹配该地图\n const int MAP_POINT_LIM_MAX = 50000;\n ///轨迹匹配点\n int32_t match_point_no_;\n ///与轨迹偏差距离\n double error_;\n ///坐标转换对象\n CoordinateTransformation coordinate_transformation_;\n ///坐标转换输入\n //Localization localization;\n ///坐标转换输出\n LocalLocalization local_localization_;\n ///地图匹配对象\n map_matching matching_;\n ///轨迹处理 双buffer\n path recv_path_1_;\n path recv_path_2_;\n ///双buffer标识\n volatile int32_t switch_buffer_;\n ///基本配置\n ControllerConfig controller_config_;\n ///控制输出\n ControllerOutput controller_output_;\n ///调试输出\n DebugOutput debug_output_;\n ///自动驾驶模式调试\n bool debug_driving_mode_enable_;\n ///自动驾驶模式\n int32_t debug_driving_mode_;\n\n /**\n * @brief GetMapMatchingNum.\n * @param[in] current_x_ 地图坐标x.\n * @param[in] current_y_ 地图坐标y.\n * @param[in] local_path_ 路径.\n * @param[in] length_ 轨迹点个数.\n * @param[in] min_error_ 最小误差.\n * @return true or false.\n */\n int32_t GetMapMatchingNum(double current_x,double current_y,path *local_path,int32_t length,double& min_error);\n\n\n /**\n * @brief PathConvert.\n * @param[in] path_msg_ controller 可处理的轨迹.\n * @param[in] trajectory_msg_ 收到的轨迹.\n * @return void.\n */\n void PathConvert(path& path_msg,const Trajectory *trajectory_msg);\n\n /**\n * @brief SpecialHandler 特殊处理.\n * @param[out] controller_output 控制器输出.\n * @return void.\n */\n void SpecialHandler(ControllerOutput * controller_output);\n\n ///纵向控制器\n LonController lon_controller_;\n ///横向控制器\n LatController lat_controller_;\n ///LQRController\n LQRLatController 
lqr_lat_controller;\n ///纵向控制器\n GenericController *generic_lon_controller_;\n ///横向控制器\n GenericController *generic_lat_controller_;\n\n /**\n * @brief RegisterGenericController.\n * @param[in] generic_lon_controller 纵向控制器.\n * @param[in] generic_lat_controller 横向控制器.\n * @return void.\n */\n void RegisterGenericController(GenericController * generic_lon_controller,GenericController *generic_lat_controller);\n\nprivate:\n //pthread_mutex_t lock_;\n};\n}//namespace control\n}//namespace athena\n\n#endif // COMMON_CONTROLLER_AGENT_H_\n"
},
{
"alpha_fraction": 0.6937234401702881,
"alphanum_fraction": 0.6998584270477295,
"avg_line_length": 29.27142906188965,
"blob_id": "64943832479761de6b35f34f6870187380622302",
"content_id": "4b83d121cb9705101777f3a1fc289b2c9159310a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2120,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 70,
"path": "/athena/core/arm/Map/include/LaneletBase.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*\n * © 2014 by Philipp Bender <[email protected]>\n *\n * This file is part of libLanelet.\n *\n * libLanelet is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * libLanelet is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with libLanelet. If not, see <http://www.gnu.org/licenses/>.\n */\n\n#pragma once\n\n#include <vector>\n#include <cstdint>\n\n#include \"lanelet_point.hpp\"\n#include \"LineStrip.hpp\"\n#include \"BoundingBox.hpp\"\n#include \"RegulatoryElement.hpp\"\n\nnamespace LLet\n{\n\nenum SIDE\n{\n LEFT = 0,\n RIGHT = 1\n};\n\nclass LaneletBase;\n\ntypedef std::pair< double, double > coord_t;\ntypedef std::shared_ptr< LaneletBase > lanelet_base_ptr_t;\n\nclass LaneletBase\n{\npublic:\n LaneletBase();\n\n /// returns the (lat, lon) pair at the specified index. Throws if index is out of range. If index is negative, it will return\n /// the -nth element from the back. 
n=0 refers to the first, n=-1 refers to the last element.\n virtual const point_with_id_t& node_at( SIDE bound, int64_t n ) const;\n\n /// returns the vector of points describing the left or right bound.\n virtual const std::vector< point_with_id_t >& nodes( SIDE bound ) const;\n\n /// returns the left and right line strip.\n virtual const std::tuple< strip_ptr_t, strip_ptr_t >& bounds() const = 0;\n\n virtual BoundingBox bb() const;\n virtual const std::vector< regulatory_element_ptr_t >& regulatory_elements() const = 0;\n\n bool fits_before( const lanelet_base_ptr_t& other ) const;\n bool fits_left(const lanelet_base_ptr_t& other) const;\n bool fits_right(const lanelet_base_ptr_t& other) const;\n bool fits_next(const lanelet_base_ptr_t &other) const;\n\n double length() const;\n};\n\n}\n"
},
{
"alpha_fraction": 0.4832838177680969,
"alphanum_fraction": 0.4933135211467743,
"avg_line_length": 23.243244171142578,
"blob_id": "46d97613cd14b029fef30006532da7ea678d5f2e",
"content_id": "1c6513673086476e3c66e2fd4747d84c586a9fb6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2886,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 111,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/oam/alarm/nad_warning.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_warning.cpp\n * 创建者:张毅00151602\n * 时 间:2016-10-11\n * 描 述:道路上的预警点\n-------------------------------------------------------*/\n\n//头文件\n#include \"nad_warning.h\"\n\n//从数据库中加载预警点\nvoid nad_warning_list::load_from_db()\n{\n list.clear();\n nad_warning warning;\n map<string, nad_record_warning> &warnings = db_query_all_warning();\n map<string, nad_record_warning>::iterator it = warnings.begin();\n for (; it != warnings.end(); it++)\n {\n nad_record_warning &rec = it->second;\n warning.id = rec.warning_id;\n warning.type = rec.warning_type;\n warning.desc = rec.warning_desc;\n warning.lane_id = rec.lane_id;\n get_lon_lat(warning.lon, warning.lat, warning.id);\n transfer.cs.ll2xy(warning.lat, warning.lon, warning.x, warning.y);\n list.push_back(warning);\n }\n LOG(WARNING) << \"加载预警点 \" << list.size() << \" 个\";\n}\n\n//把预警点绑定到route::RouteBase上\nvoid nad_warning_list::bind_key_point(route::RouteBase &route)\n{\n if (route.route_behavior_.cur_point_.index < 0)\n {\n return;\n }\n\n //绑定每一个点到route上\n for (size_t i = 0; i < list.size(); i++)\n {\n nad_warning *warning = &list[i];\n int index = 0;\n while (index < (int)route.route_behavior_.center_line_.size())\n {\n //查找点\n double dis_min = 0;\n int ret = route.match_center_line_xy(warning->x, warning->y, index, 5.0, 5.0, warning->lane_id, dis_min);\n if (ret != RET_OK)\n {\n break;\n }\n\n //绑定点 //bind key_point 需要重新实现\n //ret= route.bind_key_point_ll(index, get_warning_type(warning->type),\n // warning->id, warning->lat, warning->lon);\n if (ret != RET_OK)\n {\n break;\n }\n\n //迭代到下一个点,用于绕行多圈需要多次绑定\n index += 20;\n }\n }\n}\n\n//获得预警点类型ID\nint get_warning_type(string type)\n{\n if (type == \"single\")\n {\n return KP_W_SINGLE;\n }\n else if (type == \"change_off\")\n {\n return KP_W_CHANGE_OFF;\n }\n else if (type == \"change_on\")\n {\n return KP_W_CHANGE_ON;\n }\n else if (type == \"camera_on\")\n {\n return KP_W_CAMERA_ON;\n }\n else if 
(type == \"camera_off\")\n {\n return KP_W_CAMERA_OFF;\n }\n else if (type == \"radar_on\")\n {\n return KP_W_RADAR_ON;\n }\n else if (type == \"radar_off\")\n {\n return KP_W_RADAR_OFF;\n }\n else if (type == \"limspeed_off\")\n {\n return KP_W_LIMSPEED_OFF;\n }\n else if (type == \"degrade\")\n {\n return KP_W_DEGRADE;\n }\n\n\n return KP_NONE;\n}\n\n"
},
{
"alpha_fraction": 0.5111607313156128,
"alphanum_fraction": 0.5267857313156128,
"avg_line_length": 21.399999618530273,
"blob_id": "86ccaef2d08da4e88892dbed0099464cbcab1945",
"content_id": "7e29bccd2eb16df06c22b18a068fc074a1568dda",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 938,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 40,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/msg/zmq/zmq_t.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:zmq_t.cpp\n * 创建者:王飞虎\n * 时 间:2016-08-12\n * 描 述:zmq的公共类\n-------------------------------------------------------*/\n\n#include \"zmq_t.h\"\n#include \"../nad_msg.h\"\n\n\nzmq_t::zmq_t(): _context(1)\n{\n pthread_spin_init(&spinlock_zmq, 0);\n}\n\nzmq_t::~zmq_t()\n{\n pthread_spin_destroy(&spinlock_zmq);\n}\n\nvoid zmq_t::dispatch_handlers(nad_lcm::ne_msg_base_t &ne_msg)\n{\n int len = ne_msg.getEncodedSize();\n uint8_t buf[len];\n ne_msg.encode(buf, 0, len);\n\n int64_t recv_utime = lcm_timestamp_now();\n\n lcm_recv_buf_t rbuf;\n rbuf.data = buf;\n rbuf.data_size = len;\n rbuf.recv_utime = recv_utime;\n rbuf.lcm = g_lcm->getUnderlyingLCM();\n\n if (::lcm_try_enqueue_message(rbuf.lcm, ne_msg.header.peer_channel.c_str()))\n {\n ::lcm_dispatch_handlers(rbuf.lcm, &rbuf, ne_msg.header.peer_channel.c_str());\n }\n}\n"
},
{
"alpha_fraction": 0.46403300762176514,
"alphanum_fraction": 0.494693398475647,
"avg_line_length": 55.53333282470703,
"blob_id": "47b5cc6a831923385f1d34dafe7fa15b88ce4a21",
"content_id": "897e052338261e59f53fb4713485aadeee5f6774",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1696,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 30,
"path": "/athena/examples/ROS/src/Perception/display/cfg/Display.cfg",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nPACKAGE = \"display\"\nNODE_NAME = \"lane_ssd_displayConfig\"\nPARAMS_NAME = \"Display\"\n\nfrom math import pi\nfrom dynamic_reconfigure.parameter_generator_catkin import *\n\ngen = ParameterGenerator()\n\n#SSD Detection\nssd = gen.add_group(\"SSD\")\n# name type level description default min max\nssd.add(\"enable_show_ssd\", bool_t, 0, \"Enable show ssd detection result\", False )\nssd.add(\"ssd_r\", int_t, 0, \"Object bounding box color.\", 255, 0, 255 )\nssd.add(\"ssd_g\", int_t, 0, \"Object bounding box color.\", 0, 0, 255 )\nssd.add(\"ssd_b\", int_t, 0, \"Object bounding box color.\", 0, 0, 255 )\nssd.add(\"enable_show_label\", bool_t, 0, \"Enable show object detection label\", False )\nssd.add(\"ssd_line_width\", int_t, 0, \"Object bounding box line width.\", 1, 1, 3 )\n\n\n#Lane Detection\nlane = gen.add_group(\"Lane\")\nlane.add(\"enable_show_lane\", bool_t, 0, \"Enable show lane detection result.\", False )\nlane.add(\"lane_r\", int_t, 0, \"Lane R Channel value.\", 255, 0, 255 )\nlane.add(\"lane_g\", int_t, 0, \"Lane R Channel value.\", 0, 0, 255 )\nlane.add(\"lane_b\", int_t, 0, \"Lane R Channel value.\", 0, 0, 255 )\nlane.add(\"lane_line_width\", double_t, 0, \"Object bounding box line width.\", 2.0, 1, 3 )\n\nexit(gen.generate(PACKAGE, NODE_NAME, PARAMS_NAME))\n"
},
{
"alpha_fraction": 0.528359055519104,
"alphanum_fraction": 0.5459801554679871,
"avg_line_length": 24.758865356445312,
"blob_id": "2dbb9b8490a72db153a069e2279f63678b17b6f4",
"content_id": "b52e1c8747a3a39b2f0cc61aa982d7c596c83482",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4642,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 141,
"path": "/athena/core/x86/Map/include/regulator.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n* @file map_interface.h\n* @brief 导航模块\n* @details 1、对构建地图中的车道进行检查;2、地图上点间的集合属性,包括角度,距离等;3、地图的空间分区域检索检索;4、查询车道、车道的左右边线、以及边线上的点; 5、查询车道上的属性;6、查询道路标志等一些规则属性;7实现道路拓扑的路由\n* @author huanhuan\n* @date 2018/7/16\n* @version v1.0\n* @par Copyright (c):\n* 武汉环宇智行科技有限公司\n* @par History:\n* version: author, date, desc\\n\n*/\n#ifndef _ROADMAP_H_\n#define _ROADMAP_H_\n\nnamespace athena\n{\nnamespace roadmap\n{\n#define NEAREST_LANE_LIST 30.0\n\nclass map_point\n{\npublic:\n double lon; //经度\n double lat; //纬度\n int64_t id; //在OSM地图里面的id\n};\n\n\n\n//限速牌\nclass limspeed\n{\npublic:\n string limspeed_id; //地图没有限速牌,id=lon|lat\n int32_t active_index; //车辆从center_line[active_index]开始按limspeed_value速度行驶\n int32_t limspeed_value; //限速值,单位:km/h\n int16_t alarm_flag; //告警标志\n\n limspeed()\n {\n active_index = -1;\n limspeed_value = 20.0;\n alarm_flag = 0;\n }\n};\n\n//施工标志\nclass block\n{\npublic:\n string block_id; //地图没有施工标志,id=lon|lat\n int32_t stop_index; //车辆停止在center_line[stop_index]位置\n int32_t block_value; //限速值,单位:km/h\n int32_t lane_count; //车道数量\n int16_t alarm_flag; //告警标志\n\n block()\n {\n stop_index = -1;\n block_value = 0;\n lane_count = 2;\n alarm_flag = 0;\n }\n\n //输入当前车道,lane_index=左数车道,第一道是1\n //返回规避方法: CL_DIRECTION_NONE=不阻塞,LEFT=向左换道规划,RIGHT=向右换道规划,BLOCK=堵死\n int check(int lane_index,int op_lane_size = -1);\n};\n\nclass light\n{\npublic:\n int64_t id;\n double mileage; //mileage: + m; active -> stop\n map_point stop_point;\n map_point exit_point;\n map_point light_point;\n};\n\nclass cross_regulator\n{\npublic:\n string name;//路口名称\n int type; //路口类型\n vector<light> flow_light_list_; //flow_light_list 车流的红绿灯\n //vector<map_point> points_; //车流里的与红绿灯相关的点\n};\n\nclass RoadMap\n{\npublic:\n //二期新增动态字段,csu/rsu/obu_planning在路径规划、重规划、每秒刷新时重新设置这些信息\n vector<light> light_list_; ///<红绿灯列表\n vector<limspeed> limspeed_list_; ///<限速牌列表\n vector<block> block_list_; ///<施工标志列表\n vector<lane> lane_list_; ///<路径列表\n vector<cross_regulator> cross_list_; ///<路口列表\n\n 
/////////////////////////////////////////////////\n /// \\brief 读入RoadMap地图\n /// \\param file_name:地图读入的文件名\n /// \\return RET_OK 0; RET_ERROR 1\n /////////////////////////////////////////////////\n int read_map(string file_name);\n\n void get_flow_light_and_points_from_osm(RoadMap *osm_map);\n /////////////////////////////////////////////////\n /// \\brief 将定位点匹配到最近的lanelet上,返回地图里的lane_id,0表示匹配不到lane\n /// \\param lat:当前定位纬度\n /// \\param lon:当前定位经度\n /// \\param yaw:当前头指向\n /// \\return 返回定位车道的id,0表示匹配不到lane\n /////////////////////////////////////////////////\n int64_t locate_point_on_lanelet(double lat, double lon, double yaw = -1);\n\n ///将定位点匹配到最近的lanelet上,返回lane_list中匹配上的lane id,0表示匹配不到lane\n /////////////////////////////////////////////////\n /// \\brief 将定位点匹配到最近的lanelet上,返回地图里的lane_id,0表示匹配不到lane\n /// \\param lat:当前定位纬度\n /// \\param lon:当前定位经度\n /// \\param yaw:当前头指向\n /// \\return 返回定位车道的id,0表示匹配不到lane\n /////////////////////////////////////////////////\n int64_t locate_point_on_lane_list(double lat, double lon, const vector<int64_t> &lane_list);\n\n /////////////////////////////////////////////////\n /// \\brief 从osm地图中获取lane相关信息\n /// \\param lane_id:输入lane_id信息\n /// \\param lane:输出当前的车道\n /////////////////////////////////////////////////\n void get_lane_from_map_by_id(int64_t lane_id, lane* route_lane);\n\nprivate:\n RoadMap *osm_map_;///< osm地图\n};\n\n}\n}\n#endif // _ROADMAP_H_\n"
},
{
"alpha_fraction": 0.7076923251152039,
"alphanum_fraction": 0.7179487347602844,
"avg_line_length": 40.71428680419922,
"blob_id": "617f3edba77b8e3e405233c65fcb9b1ed668e671",
"content_id": "a35aeee8d0195a138070a2f996b76a64798ee8b4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 585,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 14,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/colormisc.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n#ifndef\t_COLORMISC_H_\n#define\t_COLORMISC_H_\n\n#include \"type.h\"\n\nextern BOOL convertToPseudColor(double v, Uchar *r, Uchar *g, Uchar *b, double min_v = 0.0, double max_v = 1.0);\nextern BOOL convertToPseudColor2(double v, Uchar *r, Uchar *g, Uchar *b, double min_v, double max_v);\nextern BOOL convertToPseudColor3(double v, Uchar *r, Uchar *g, Uchar *b, double min_v, double max_v);\nextern double rgbtoh(Uchar R, Uchar G, Uchar B);\nextern double rgbtos(Uchar R, Uchar G, Uchar B);\nextern double rgbtoi(Uchar R, Uchar G, Uchar B);\nextern Uchar rgbtog(Uchar R, Uchar G, Uchar B);\n\n#endif\n"
},
{
"alpha_fraction": 0.4637681245803833,
"alphanum_fraction": 0.4637681245803833,
"avg_line_length": 24.518518447875977,
"blob_id": "28bba922367a69468d48c4e1c2171a693b3579bf",
"content_id": "ff92bb9e4361c6f76a58a34ce98b6a4c1d737712",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 690,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 27,
"path": "/athena/examples/LCM/Singlecar/control/apps/track_trajectory/cau_heading_steering.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <fstream>\n#include <iostream>\n\n#include \"common/path.h\"\n#include \"common/math_util.h\"\n#include \"common/map_matching/spline.h\"\n#include \"common/map_matching/heading.h\"\n#include \"common/map_matching//steering_angle.h\"\n\nvoid cau_all_output_from_single_spline(\n path& p,\n int no_points,\n double speed);\n\n\nvoid cau_path_from_spline(path& p, int no_points);\n//////////////////////////////////////////////////////////////////////////////////////////////////\n//\n// for motion planning\n//\n/////////////////////////////////////////////////////////////////////////////////////////////////\n\n\nvoid spline_kp(\n path& p,\n double speed,\n int length );\n\n"
},
{
"alpha_fraction": 0.5181347131729126,
"alphanum_fraction": 0.5302245020866394,
"avg_line_length": 34.50471878051758,
"blob_id": "34d4de133f0d58c16c7b9fd188e267d1dadee745",
"content_id": "f783ae1f493f94e9594a7978a20647f9221ea9e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9337,
"license_type": "no_license",
"max_line_length": 162,
"num_lines": 212,
"path": "/athena/core/x86/Navi/include/route.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n* @file route.h\n* @brief 导航模块\n* @details 1、实现车道级的导航;2、输入经纬度或者xy进行相应的定位\n* @author huanhuan\n* @date 2018/7/16\n* @version v1.0\n* @par Copyright (c):\n* 武汉环宇智行科技有限公司\n* @par History:\n* version: author, date, desc\\n\n*/\n\n#ifndef _ROUTE_H\n#define _ROUTE_H\n\n#include <GL/glu.h>\n#include <GL/glut.h>\n#include \"route_data.h\"\n#include \"nad_function.h\"\n\nusing namespace athena;\n//using namespace route;\n\nnamespace athena\n{\nnamespace route\n{\n\n/**\n* @brief 规划基础类,1、车道级的规划以及相应定位功能\n*/\nclass RouteBase\n{\npublic:\n roadmap::RoadMap *map_; ///<地图\n route::coord_transfer transfer_; ///<坐标转换\n\n std::string destination_; ///<目的地名称\n int32_t route_reason_; ///<路径规划原因,参考枚举值:ROUTE_REASON_XXX\n int64_t time_stamp_; ///<产生此路径规划的时间,gettimeofday获得的毫秒数\n //std::vector<roadmap::lane> lane_list_; ///<路径列表\n\n //基于关键点的行为\n Route_Behavior route_behavior_; ///行为点\n //std::vector<center_point> center_line_; ///<最左边车道的中心线 相当于道路级的规划 用于与道路级事件匹配\n //std::vector<route::center_point> key_points_; ///<从起点到终点的关键点,道路级事件\n\n //int32_t cur_center_line_index_; ///<车辆当前在center_line[cur_center_line_index]附近\n //int32_t cur_key_point_index_; ///<车辆当前在key_points[cur_key_point_index]附近\n //route::center_point cur_point_; ///<在route中的当前点,数据来自center_line[cur_center_line_index]\n\n\n route_planning_m route_motion_info_;\n route_planning_m route_motion_info_draw_;\n\n std::map<int64_t,Point_m> blane_lborder_map_;///<兄弟车道左边界,点序列,切片相关\n std::map<int64_t,Point_m> blane_rborder_map_;///<兄弟车道右边界,点序列,切片相关\n std::map<int64_t,Point_m> changelane_lborder_map_; ///<对向车道左边界,点序列,切片相关\n //vector <int64_t> sec_vec;\n int32_t cur_sec_index_; ///<当前位置在section中的定位下标\n int32_t cur_lane_index_; ///<在切片中从左往右lane的下标,依次为0123.\n double mileage_section_acc_; ///<lane_to_section_line的里程计数器\n double mileage_section_send_; ///<每次下发切片的累计里程;\n int32_t ending_point_alarm_cnt_;\n\npublic:\n RouteBase();\n RouteBase(std::string filename);\n\npublic:\n /////////////////////////////////////////////////\n /// 
\\brief 执行路径规划,成功返回RET_OK,并填充除了ets外的全部信息\n ///\n /// \\param key_point_list:途经点信息\n /// \\param route_reason:路径规划的原因\n /// \\param destination:路径规划目的地名称\n /// \\param lat:当前定位纬度\n /// \\param lon:当前定位经度\n /// \\param yaw:当前头指向\n /// \\param begin:\n /// \\return\n ///\n /////////////////////////////////////////////////\n int routing(std::vector<key_point> &key_point_list, int32_t route_reason, std::string destination, double lat, double lon, double yaw = -1, size_t begin = 0);\n\n /////////////////////////////////////////////////\n /// \\brief 定位cur_center_line_index在center_line中的位置 //实时定位在center_line中的位置,方便后续使用\n ///\n /// \\param lat:当前定位纬度\n /// \\param lon:当前定位经度\n /// \\param yaw:当前头指向\n /// \\return RET_OK:0 , RET_ERROR 1, RET_ROUTE_START_ERR 16 ,RET_ROUTE_END_ERR 17\n ///\n /////////////////////////////////////////////////\n void locate_on_lane(double lat, double lon, double yaw = -1);\n\n /////////////////////////////////////////////////\n /// \\brief 定位当前位置在那个切片哪个车道上\n ///\n /// \\param lat:当前定位纬度\n /// \\param lon:当前定位经度\n /// \\return RET_OK:0 , RET_ERROR 1\n ///\n /////////////////////////////////////////////////\n void locate_position_on_section_ll(double lat, double lon);\n /////////////////////////////////////////////////\n /// \\brief 定位当前位置在那个切片哪个车道上\n ///\n /// \\param x:当前定位纬度\n /// \\param y:当前定位经度\n /// \\return RET_OK:0 , RET_ERROR 1\n ///\n /////////////////////////////////////////////////\n void locate_position_on_section__xy(double x, double y);\n\n /////////////////////////////////////////////////\n /// \\brief 将lane_list转化为高速道中心线\n /// \\return\n /// 函数最终会填写 route_motion_info,blane_lborder_map,blane_rborder_map,changelane_lborder_map\n /////////////////////////////////////////////////\n void lane_to_center_line(double lat = -1.0, double lon = -1.0);\n\n /////////////////////////////////////////////////\n /// \\brief 将车道lane_list转换为切片的序列\n /// \\return\n /// 函数最终会填写 
route_motion_info,blane_lborder_map,blane_rborder_map,changelane_lborder_map\n /////////////////////////////////////////////////\n void lane_to_section_line(double lat = -1.0, double lon = -1.0);\n\n ////////////////////////////////////////////////\n /// \\brief 清空route对象中所有记录\n /// \\return\n ///清理对象中的变量\n /////////////////////////////////////////////////\n void clear();\n\n ////////////////////////////////////////////////\n /// \\brief 在中心线上进行匹配\n /// \\return RET_OK:0 , RET_ERROR 1\n /// \\param lat:当前位置的纬度\n /// \\param lon:当前位置的经度\n ///在中心线上搜索点,返回:RET_OK=存在<min_ok的点,RET_NOT_SAME=存在>=min_ok的点,RET_NOT_EXIST=无点\n ///例如min_ok=3表示: [首次距离<3米, 其后>3米]的区间内最小的点\n /////////////////////////////////////////////////\n int match_center_line_ll(double lat, double lon, int &index, double min_ok, double range, int64_t lane_id, double &dis_min);\n\n ////////////////////////////////////////////////\n /// \\brief 在中心线上进行匹配\n /// \\return RET_OK:0 , RET_ERROR 1\n /// \\param x:高斯投影之后的x坐标\n /// \\param y:高斯投影之后的y坐标\n ///在中心线上搜索点,返回:RET_OK=存在<min_ok的点,RET_NOT_SAME=存在>=min_ok的点,RET_NOT_EXIST=无点\n ///例如min_ok=3表示: [首次距离<3米, 其后>3米]的区间内最小的点\n ///匹配后的结果填入到\n /////////////////////////////////////////////////\n int match_center_line_xy(double x, double y, int &index, double min_ok, double range, int64_t lane_id, double &dis_min);\n\n ////////////////////////////////////////////////\n /// \\brief 在切片序列上进行匹配\n /// \\return RET_OK:0 , RET_ERROR 1\n /// \\param lat:当前位置的纬度\n /// \\param lon:当前位置的经度\n ///在中心线上搜索点,返回:RET_OK=存在<min_ok的点,RET_NOT_SAME=存在>=min_ok的点,RET_NOT_EXIST=无点\n ///例如min_ok=3表示: [首次距离<3米, 其后>3米]的区间内最小的点\n ///匹配后的结果填入到\n /////////////////////////////////////////////////\n int match_section_line_ll(double lat,double lon,int &sec_index,int &lane_index, double min_ok, double range, double &dis_min);\n\n ////////////////////////////////////////////////\n /// \\brief 在切片序列上进行匹配\n /// \\return RET_OK:0 , RET_ERROR 1\n /// \\param x:高斯投影之后的x坐标\n /// \\param y:高斯投影之后的y坐标\n 
///在中心线上搜索点,返回:RET_OK=存在<min_ok的点,RET_NOT_SAME=存在>=min_ok的点,RET_NOT_EXIST=无点\n ///例如min_ok=3表示: [首次距离<3米, 其后>3米]的区间内最小的点\n ///匹配后的结果填入到\n /////////////////////////////////////////////////\n int match_section_line_xy(double x,double y,int &sec_index,int &lane_index, double min_ok, double range, double &dis_min);\n\n void route_section_clear(route_planning_m &route_sec);\n\n int get_op_lane_size(double cur_lat,double cur_lon,double cur_yaw);\n\n int get_op_lane_size(int64_t lane_id);\n\n ///返回当前左边或者右边的可变道车道数目\n int get_change_lane(double cur_lat,double cur_lon,double cur_yaw,int turn);\n\n int get_change_lane(int64_t lane_id,int turn);\n\n ///把中心线的某个点修改为关键点,传入xy或ll可以判断在哪个车道,成功返回RET_OK\n int bind_key_point(int &index, int type, std::string id, bool *insert_flag = NULL);\n int bind_key_point_xy(int &index, int type, std::string id, double x, double y);\n int bind_key_point_ll(int &index, int type, std::string id, double lat, double lon);\n\n ///跟显示有关的函数\n ///用opengl画出nad_lane和centrol_line以及当前定位的点\n void draw();\n void draw_part_line();\n void draw_part_section_line();\n ///使用opengl画lanelet,show的私有函数\n void draw_lanelet(lanelet_ptr_t& ll_draw);\n\nprivate:\n ///获取当前时间(毫秒)\n int64_t get_current_route_time();\n\n};\n}\n}\n#endif\n"
},
{
"alpha_fraction": 0.6255212426185608,
"alphanum_fraction": 0.6313594579696655,
"avg_line_length": 22,
"blob_id": "85c4d64af343536bc54c6604b3133a5fa3476153",
"content_id": "2c25b3387f6cd284124043b0a3393c5a5530caad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1443,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 52,
"path": "/athena/examples/LCM/Singlecar/control/common/logging.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n#include \"logging.h\"\n#include <glog/logging.h>\nnamespace athena{\nnamespace control{\nbool Logging::log_enable_ = true;\nvoid Logging::Init(bool log_enable)\n{\n log_enable_ = log_enable;\n //级别高于ERROR的才输出到屏幕上\n google::SetStderrLogging(google::WARNING);\n //设置输出到屏幕的日志显示相应颜色\n FLAGS_colorlogtostderr=true;\n //设置 google::INFO 级别的日志存储路径和文件名前缀\n google::SetLogDestination(google::INFO,\"log/INFO_\");\n //设置 google::WARNING 级别的日志存储路径和文件名前缀\n google::SetLogDestination(google::WARNING,\"log/WARNING_\");\n //设置 google::ERROR 级别的日志存储路径和文件名前缀\n google::SetLogDestination(google::ERROR,\"log/ERROR_\");\n //缓冲日志输出,默认为30秒,此处改为立即输出\n FLAGS_logbufsecs =0;\n //最大日志大小为 10MB\n FLAGS_max_log_size =10;\n //当磁盘被写满时,停止日志输出\n FLAGS_stop_logging_if_full_disk = true;\n //捕捉 core dumped\n google::InstallFailureSignalHandler();\n}\n\nvoid Logging::LogInfo(int level,std::string info)\n{\n if(log_enable_ == false)\n {\n return;\n }\n\n switch(level)\n {\n case INFO:\n LOG(INFO) <<info;\n break;\n case WARNING:\n LOG(WARNING) <<info;\n break;\n case ERROR:\n LOG(ERROR) <<info;\n break;\n default:\n break;\n }\n}\n}\n}\n\n\n"
},
{
"alpha_fraction": 0.6637609004974365,
"alphanum_fraction": 0.6699875593185425,
"avg_line_length": 21.94285774230957,
"blob_id": "cd041f5d5d55920678518662570248a53bab2170",
"content_id": "581db49f7d5c7f5caa69ffc353e1af6066467175",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2743,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 105,
"path": "/athena/core/x86/Camera/vision_ssd_detect/include/distance_calculation.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/// //////////////////////////////////////////////\n///@file 计算相机到路面目标的距离\n///@brief 需要标定相机 相机内参 安装角度位置\n///@author duohaoxue\n///@version v1.0\n///@date 2018.07.19\n/// //////////////////////////////////////////////\n #pragma once\n #include <iostream>\n #include <opencv2/core/core.hpp>\n #include <opencv2/highgui/highgui.hpp>\n #include <opencv2/imgproc/imgproc.hpp>\n\n/// 计算路面目标到相机的距离,包含相机内参和 安装位置\n class Calculate_distance\n {\npublic:\n /// 相机主光点 横向\n double CX0;\n /// 相机主光点 纵向\n double CY0 ;\n /// 相机焦距\n double FOCUS;\n /// 相机水平方向\n double POSX ;\n /// 相机垂直方向安装高度\n double POSY ;\n /// 相机相对路面方向\n double POSZ ;\n /// 相机安装俯仰角\n float PITCH;\n /// 相机安装航向角\n double YAW ;\n /// 水平单位像素距离\n double m_per_pix_j;\n ///垂直单位像素距离\n double m_per_pix_i;\n\nprivate:\n\n double getRoadTopY;\n\n double getRoadLeftX ;\n\n double getRoadImageHeight;\n\n double getRoadImageWidth;\n\n double getRoadRealHeight;\n\n double getRoadRealWidth;\n\n ///正矩阵\n double calcVimageFromJsrc(int iJsrc);\n\n double calcUimageFromIsrc(int iIsrc);\n\n double calcYcameraFromUimage(double dUimage);\n\n double calcZcameraFromYcameraOnRoad(double dYcamera);\n\n double calcXcameraFromVimage(double dVimage);\n\n double calcZvehicleFromZcamera(double dZcamera);\n\n double calcXvehicleFromXcamera(double dXcamera);\n\n double calcIroadFromZvehicle(double dZVehicle);\n\n double calcJroadFromXvehicle(double dXVehicle);\n\n\n ///反矩阵\n double calcJsrcFromVimage( double dVimage);\n\n double calcIsrcFromUimage( double dUimage);\n\n double calcYcameraFromZcameraOnRoad(double dZcamera);\n\n double calcUimageFromYcamera(double dYcamera);\n\n double calcVimageFromXcamera( double dXCamera);\n\n double calcZcameraFromZvehicle(double dZVehicle);\n\n double calcXcameraFromXvehicle( double dXvehicle);\n\n double calcZvehicleFromIroad(double iIroad);\t// Iroad->Zvehicle\n\n double calcXvehicleFromJroad(double a_iJroad);\t// Jroad->Xvehicle\n\n\npublic:\n ///\n Calculate_distance();\n /// 图像像素转换到路面位置\n void 
transformInputToRoad(double iIsrc, double iJsrc, double *pdZVehicle, double *pdXVehicle);\n ///计算路面上目标框到相机的距离\n void calcPosRoadFromPosImage(cv::Rect &rect,double *longitudinal,double *lateral,double *veh_width);\n /// 路面目标位置转换到图像像素\n void transformInputToImage(double pdZVehicle, double pdXVehicle,double *iIsrc, double *iJsrc);\n\n\n\n };\n"
},
{
"alpha_fraction": 0.4986259937286377,
"alphanum_fraction": 0.5192835927009583,
"avg_line_length": 27.444744110107422,
"blob_id": "536047562394197b24b562d98d1990eae4805b6d",
"content_id": "0a183477e70143a73ddd0c57f6733505b58403bc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 11129,
"license_type": "no_license",
"max_line_length": 158,
"num_lines": 371,
"path": "/athena/examples/LCM/Singlecar/control/apps/track_trajectory/track_trajectory.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"track_trajectory.h\"\n\nnamespace athena{\nnamespace control{\nvoid TrackTrajectory::Init(string local_trajectory_path,ControlLogic *control_logic)\n{\n control_logic_ = control_logic;\n recv_path_.read_navi_file(local_trajectory_path,&control_logic -> controller_config_);\n CauAllOutputFromSingleSpline(recv_path_, recv_path_.ref_points_.size(), 10);\n CauPathFromSpline(recv_path_, recv_path_.ref_points_.size());\n PathToTrajectory(recv_path_,trajectory_);\n control_logic -> SubscribeTrajectory(trajectory_);\n}\n\nvoid TrackTrajectory::PathToTrajectory(path &path,Trajectory &trajectory)\n{\n ///轨迹点数量\n trajectory.points_.clear();\n NavPoints navpoints;\n trajectory.num_of_points_ = path.ref_points_.size();\n\n for(int i = 0; i < trajectory.num_of_points_;i++)\n {\n ///GPS 时间\n navpoints.gps_time_ = 0.0;\n ///x值\n navpoints.p_x_ = path.ref_points_[i].position_x_;\n ///y值\n navpoints.p_y_ = path.ref_points_[i].position_y_;\n ///里程值\n navpoints.s_ = path.ref_points_[i].s_;\n ///到达该点的速度\n navpoints.p_v_ = tar_speed_;\n ///到达该点的加速度\n navpoints.p_a_ = path.ref_points_[i].accelerataion_;\n ///到达该点的航向角\n navpoints.p_h_ = path.ref_points_[i].heading_;\n ///到达该点的曲率\n navpoints.p_k_ = path.ref_points_[i].k_s_;\n //std::cout<<\"p_k_:\"<<navpoints.p_k_ <<endl;\n ///到达该点的档位\n navpoints.p_g_ = path.ref_points_[i].p_g_;\n\n trajectory.points_.push_back(navpoints);\n }\n}\n\nvoid TrackTrajectory::CauAllOutputFromSingleSpline(path& p, int no_points, double speed)\n{\n if( no_points < 10 )\n {\n cout << \"ponis num not enough\" << endl;\n abort();\n }\n\n int i,k;\n\n std::vector<double> x, y, s, h, st;\n std::vector<double> xx, yy, ss, hh, stst;\n\n tk::spline s_x1, s_y1;\n\n double steering_ks;\n double heading_ks;\n double Ux_ks;\n double cs, r;\n\n double INTERPOLATION_INTERVAL = 7;\n double spline_every_ = 0.1;\n int interval = (int )(INTERPOLATION_INTERVAL/spline_every_ ); //INTERPOLATION_INTERVAL = 7; spline_every_ = 0.1\n if(no_points < 3*interval)\n {\n 
cout << \"ponis num not enough\" << endl;\n abort();\n }\n\n // 采集均匀的控制点\n k=0;\n /* for( i=0; i< no_points; i +=INTERPOLATION_POINT_NUM)\n {\n // if(p.ref_points_[i].s > k*INTERPOLATION_INTERVAL) //0.348925 //0.308564\n {\n s.push_back(p.ref_points_[i].s);\n x.push_back(p.ref_points_[i].position_x);\n y.push_back(p.ref_points_[i].position_y);\n k++;\n }\n }*/\n //先加入一个\n s.push_back(p.ref_points_[0].s_);\n x.push_back(p.ref_points_[0].position_x_);\n y.push_back(p.ref_points_[0].position_y_);\n k++;\n // 内部计算里程,可能出现负值的情况。\n // 70\n for(i= interval; i< no_points; i += interval )\n {\n // if(p.ref_points_[i].s > k*INTERPOLATION_INTERVAL\n // && p.ref_points_[i].s > s.back() + 0.5 )\n if(p.ref_points_[i].s_ > s.back() + 0.1)\n {\n s.push_back(p.ref_points_[i].s_);\n x.push_back(p.ref_points_[i].position_x_);\n y.push_back(p.ref_points_[i].position_y_);\n k++;\n }\n }\n if(s.size() < 2)\n {\n cout << \"spline points not enough\" << endl;\n abort();\n }\n\n //里程的插值方法\n s_x1.set_points(s,x);\n s_y1.set_points(s,y);\n\n p.ref_points_.clear(); //?????? 
big error\n navi_point np;\n\n cs = 0;\n i = 0;\n while( cs<= s[s.size()-1])\n {\n cs = i * spline_every_; // 0.1\n Ux_ks = const_speed_; // speed;\n\n steering_ks = cau_steering_angle_from_ks(\n s_x1,\n s_y1,\n cs,\n Ux_ks,\n r,\n steering_cau_one_point_,\n steering_cau_two_point_,\n control_logic_ -> controller_config_.steer_tranmission_ratio_);\n\n steering_ks = iclamp(steering_ks, control_logic_ -> controller_config_.min_steering_angle_ ,control_logic_ -> controller_config_.max_steering_angle_);\n\n //add by alex 20170607 new way to calc heading\n heading_ks = cau_heading_angle_from_ks(s_x1,s_y1,cs);\n\n np.s_ = cs;\n np.point_no_ = i;\n np.heading_ = heading_ks;\n np.steering_angle_ = steering_ks;\n //std::cout<<\"steering_angle:\"<<np.steering_angle_<<endl;\n np.position_x_ = s_x1(cs);\n np.position_y_ = s_y1(cs);\n np.k_s_ = 1/r;\n\n int every = mark_every_;\n if( i % every == 0 ) //控制点密度和当前速度相关。speed\n np.control_mark_ = 1;\n else\n np.control_mark_ = 0;\n\n p.ref_points_.push_back(np);\n\n i++;\n }\n\n int length = p.ref_points_.size();\n SplineKp(p, speed, length );\n\n}\n\n//add by alex 20170615 对接收的轨迹重新插值计算\n//使用该方法的前提是传入的轨迹点距必须时0.1m\n//新轨迹需要的信息:x,y,heading,ks,s,steering_angle,gps_time,v,a,档位,其中gps_time,v,a,档位直接读取原始值\n//根据曲率变化重新插值\n#define KS_STEP 0.015\n#define S_STEP 20\nvoid TrackTrajectory::CauPathFromSpline(path& p, int no_points)\n{\n if( no_points < 10 )\n {\n cout << \"ponis num not enough\" << endl;\n abort();\n }\n\n int i,k;\n\n std::vector<double> x, y, s;\n //std::vector<double> steer_angle, point_speed;\n tk::spline s_x1, s_y1;\n //tk::spline s_steer,s_speed;\n\n double steering_ks;\n double heading_ks;\n double Ux_ks;\n double cs, r;\n\n double INTERPOLATION_INTERVAL = 7;\n double spline_every_ = 0.1;\n\n int interval = (int )(INTERPOLATION_INTERVAL/spline_every_ ); //INTERPOLATION_INTERVAL = 7; spline_every_ = 0.1 每70个点取一个点\n if(no_points < 3*interval)\n {\n cout << \"ponis num not enough\" << endl;\n abort();\n }\n\n // 采集均匀的控制点\n 
k=0;\n //先加入一个\n s.push_back(p.ref_points_[0].s_);\n x.push_back(p.ref_points_[0].position_x_);\n y.push_back(p.ref_points_[0].position_y_);\n k++;\n // 内部计算里程,可能出现负值的情况。\n // 70\n for(i= interval; i< no_points; i += interval )\n {\n // if(p.ref_points_[i].s > k*INTERPOLATION_INTERVAL\n // && p.ref_points_[i].s > s.back() + 0.5 )\n if(p.ref_points_[i].s_ > s.back() + 0.1)\n {\n s.push_back(p.ref_points_[i].s_);\n x.push_back(p.ref_points_[i].position_x_);\n y.push_back(p.ref_points_[i].position_y_);\n k++;\n }\n double distance = 0;\n double delta_ks=0;\n interval = 1;\n while(distance < S_STEP && (i+interval < no_points) && (delta_ks < KS_STEP))//取点改为每隔3m取一个点\n {\n distance = length_two_points(p.ref_points_[i+interval].position_x_, p.ref_points_[i+interval].position_y_,\n p.ref_points_[i].position_x_, p.ref_points_[i].position_y_);\n delta_ks = fabs(p.ref_points_[i].k_s_ - p.ref_points_[i+interval].k_s_);\n interval++;\n }\n }\n if(s.size() < 2)\n {\n cout << \"spline points not enough\" << endl;\n abort();\n }\n\n //里程的插值方法\n s_x1.set_points(s,x);\n s_y1.set_points(s,y);\n\n path path_bak;\n path_bak.ref_points_.clear();\n path_bak.reset_path(p.ref_points_);//copy some msg to path_bak\n\n p.ref_points_.clear();\n navi_point np;\n\n cs = 0;\n i = 0;\n while( cs<= s[s.size()-1])\n {\n cs = i * spline_every_; // 0.1\n Ux_ks = const_speed_; // speed;\n\n if(cs <= 5.5)\n {\n steering_ks = cau_steering_angle_from_ks(s_x1,s_y1,cs,Ux_ks,r,0,8.5,control_logic_ -> controller_config_.steer_tranmission_ratio_);\n }\n else\n {\n steering_ks = cau_steering_angle_from_ks(\n s_x1,\n s_y1,\n cs,\n Ux_ks,\n r,\n -5.5,//-5.5 STEERING_CAU_ONE_POINT debug by alex\n 8.5,\n control_logic_ -> controller_config_.steer_tranmission_ratio_);//8.5 STEERING_CAU_TWO_POINT\n }\n\n steering_ks = iclamp(steering_ks, control_logic_ -> controller_config_.min_steering_angle_, control_logic_ -> controller_config_.max_steering_angle_);\n\n //add by alex 20170607 new way to calc heading\n heading_ks = 
cau_heading_angle_from_ks(s_x1,s_y1,cs);\n\n np.s_ = cs;\n np.point_no_ = i;\n np.heading_ = heading_ks;\n\n np.steering_angle_ = steering_ks;\n np.position_x_ = s_x1(cs);\n np.position_y_ = s_y1(cs);\n\n //add by alex20170615\n np.pos_gps_time_ = path_bak.ref_points_[i].pos_gps_time_;\n np.acceleration_desired_Axs_ = path_bak.ref_points_[i].acceleration_desired_Axs_;\n np.speed_desired_Uxs_ = path_bak.ref_points_[i].speed_desired_Uxs_;\n //np.curvature_ = path_bak.ref_points_[i].curvature_;\n np.p_g_ = path_bak.ref_points_[i].p_g_;\n\n np.k_s_ = 1.0/r;//曲率反向\n np.curvature_ = np.k_s_;\n\n\n p.ref_points_.push_back(np);\n\n i++;\n }\n path_bak.ref_points_.clear();\n\n int length = p.ref_points_.size();\n SplineKp(p, 8, length );\n\n}\n\n// 计算动态KP值(elvis最新修改,修改前会造成内存错误)\nvoid TrackTrajectory::SplineKp(path& p, double speed, int length )\n{\n int i;\n std::vector<double> ss, skp; // 构造二维矩阵:X,Y\n double cs;\n double cau_kp;\n ss.clear();\n skp.clear();\n\n // 550 -> 12\n // 15 -> 2\n // 550 -15 = 545/10 = 54.5\n\n // 200 / 54.5 = 4+2 = 6\n // 400 / 54.5 = 8+2 = 10\n double max_steering = 0;\n if((length < 30) || (length > 5000)) return; //地图检验\n for(i=0; i< length; i ++)\n {\n max_steering = fabs(p.ref_points_[i].steering_angle_);\n cau_kp = (max_steering)/kp_slope_ + kp_value_;\n\n if(cau_kp > 10.0)\n cau_kp = 10;\n\n p.ref_points_[i].suggest_kp_ = cau_kp;\n }\n\n int every = mark_every_;//70\n\n for(i=0; i< length; i += every)\n {\n if (i==0)\n {\n ss.push_back(p.ref_points_[i].s_); // 将曲率传入到矩阵中\n skp.push_back(p.ref_points_[i].suggest_kp_);// 将推荐KP传入到矩阵中\n }\n else\n {\n if(p.ref_points_[i].s_ > ss.back() + 0.5)\n {\n ss.push_back(p.ref_points_[i].s_);\n skp.push_back(p.ref_points_[i].suggest_kp_);\n }\n }\n }\n // 新建一个拟合器\n tk::spline skp_st;\n // 拟合器更新拟合点\n skp_st.set_points(ss, skp);// tips\n\n for(i=0; i< length; i++)\n {\n cs = p.ref_points_[i].s_;\n cau_kp = skp_st(cs);\n p.ref_points_[i].suggest_kp_ = cau_kp;\n }\n}\n}\n}\n"
},
{
"alpha_fraction": 0.7387387156486511,
"alphanum_fraction": 0.7567567825317383,
"avg_line_length": 19.18181800842285,
"blob_id": "ff49be02443ba44f205dc71bd8ba314be2c961f2",
"content_id": "f2d6ebe5efbdb5c73f9c4574f03613e701a8638f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 396,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 11,
"path": "/athena/examples/titan3/launch_car/conf/readme.txt",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "1、config.xml\n 配置了每个网元的名称和IP,当前版本网络IP在这里配死。\n\n2、db/\n 数据库文件,csu/rsu/obu/rsd/sim_system支持该内存数据库\n\n3、task/\n 脚本,可在oct执行脚本,OBU启动正常时运行\"_obu_name.task\"脚本,RSU启动正常时运行\"_rsu_name.task\"脚本\n\n4、simulate/\n 仿真脚本,sim_system加载这里的文件实现仿真\n"
},
{
"alpha_fraction": 0.6927560567855835,
"alphanum_fraction": 0.6994171738624573,
"avg_line_length": 24.553192138671875,
"blob_id": "3e4312f77d807a78339ce4fb3fd970bfa5ac644d",
"content_id": "2913e5737da47605449c14faa8cd7e8fc8749fdf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1202,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 47,
"path": "/athena/core/arm/Map/include/LLTree.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*\n * © 2014 by Philipp Bender <[email protected]>\n *\n * This file is part of libLanelet.\n *\n * libLanelet is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * libLanelet is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with libLanelet. If not, see <http://www.gnu.org/licenses/>.\n */\n\n#pragma once\n\n#include \"RTree.h\"\n#include \"BoundingBox.hpp\"\n#include \"Lanelet.hpp\"\n\n#include <vector>\n#include <tuple>\n\n/* Wraps the RTree and provides some smarter access methods. */\n\nnamespace LLet\n{\n\nusing std::get;\n\nstruct LLTree\n{\n typedef RTree< int64_t, double, 2 > tree_t;\n void insert(const lanelet_ptr_t& obj);\n std::vector< lanelet_ptr_t > query( const BoundingBox& bb);\nprivate:\n tree_t _tree;\n std::vector< lanelet_ptr_t > _lanelets;\n\n};\n\n}\n"
},
{
"alpha_fraction": 0.6144578456878662,
"alphanum_fraction": 0.634036123752594,
"avg_line_length": 16.473684310913086,
"blob_id": "011812138e34ebb610c554192ff6faeaf215d12c",
"content_id": "61d2a003f0d623b13aacf96075744bfad8340851",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 664,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 38,
"path": "/athena/core/x86/Control/include/debug_output.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file debug_output.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef COMMON_DEBUG_OUTPUT_H_\n#define COMMON_DEBUG_OUTPUT_H_\n\n#include \"common/path.h\"\n#include \"local_localization.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class controller\n * @brief controller base.\n */\nclass DebugOutput{\npublic:\n DebugOutput() = default;\n ~DebugOutput() = default;\n double tar_speed_;\n int32_t match_point_num_;\n path path_;\n LocalLocalization local_localization_;\n};\n}\n}\n#endif // COMMON_DEBUG_OUTPUT_H_\n"
},
{
"alpha_fraction": 0.540617823600769,
"alphanum_fraction": 0.5846682190895081,
"avg_line_length": 28.627119064331055,
"blob_id": "9df0ce549dd6ad4c23e1679fbb3c4c3ff7ef15ad",
"content_id": "f07b037dc8bd634f34e26823c7e57c9c0c9eff7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2288,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 59,
"path": "/athena/core/x86/Common/include/base/nad_retcode.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_retcode.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:公共返回码\n-------------------------------------------------------*/\n#ifndef _NAD_RETCODE_H\n#define _NAD_RETCODE_H\n\n//返回值数据类型为int\n\n//公共返回值\n#define RET_OK 0\n#define RET_ERROR 1\n#define RET_NORMAL 2\n\n//公共返回码\n#define RET_EXIST 10 //新增的记录已存在\n#define RET_NOT_EXIST 11 //修改或删除的记录不存在\n#define RET_LCM_INIT_ERR 12 //lcm初始化失败\n#define RET_DB_CONN_ERR 13 //数据库连接错误\n#define RET_DB_QUERY_ERR 14 //数据库查询错误\n#define RET_FILE_ERR 15 //文件操作错误\n#define RET_ROUTE_START_ERR 16 //在地图上找不到路径规划起点\n#define RET_ROUTE_END_ERR 17 //在地图上找不到路径规划终点\n#define RET_ROUTE_NO_RESULT 18 //没有合适的路径\n#define RET_ROUTE_NOT_MATCH 19 //路径规划结果的时戳不一致\n#define RET_CONTINUE 20 //执行脚本时,脚本没有执行完毕\n#define RET_FINISHED 21 //执行脚本时,没有下一行脚本,成功结束\n#define RET_MISS_QUOTE 22 //引号不匹配\n#define RET_MISS_BRACKET 23 //括号不匹配\n#define RET_BAD_CHAR 24 //无效的字符\n#define RET_CROSS_REF 25 //脚本交叉引用\n#define RET_BAD_FUNC 26 //无效的函数\n#define RET_BAD_STATE 27 //无效的state\n#define RET_BAD_PARA 28 //无效的参数\n#define RET_TIMEOUT 29 //超时\n#define RET_IN_PLATOON 30 //目前正在编队中\n#define RET_SAME 31 //相同\n#define RET_NOT_SAME 32 //不同\n#define RET_BUSY 33 //繁忙\n#define RET_FORBID 34 //禁止操作\n#define RET_IN_COOPERATE 35 //协助式换道中\n\n#define RET_DEGRADE_FORBIT 36 //驾驶降级阶段禁止切换自动驾驶\n\n\n//OCT召车对应的返回码\n#define RET_CALL_CAR_ONLY 0 //只允许召车\n#define RET_PARK_CAR_ONLY 1 //只允许还车\n#define RET_IN_CALL_CAR 2 //召车中\n#define RET_IN_PARK_CAR 3 //还车中\n\n\n//获得返回码的字符串格式描述\nconst char *ret_str(const int ret);\n\n\n#endif\n"
},
{
"alpha_fraction": 0.5629629492759705,
"alphanum_fraction": 0.5918518304824829,
"avg_line_length": 18.852941513061523,
"blob_id": "d4359913d53c7ed467b21f56f8840fc501cab0ac",
"content_id": "5e7b63679af62d6f03ddae27f2f212da64a9d2f0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1516,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 68,
"path": "/athena/core/x86/Control/include/controller_output.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file controller_output.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROLLER_OUTPUT_H_\n#define CONTROLLER_OUTPUT_H_\n\n#include \"controller_output_alarm.h\"\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n\n/**\n * @class ControllerOutput\n *\n * @brief controller output.\n */\nclass ControllerOutput\n{\n public:\n ControllerOutput()\n {\n steering_angle_ = 0.0;\n steering_angle_speed_ = 0.0;\n steering_driving_mode_ = 0;\n acc_value_ = 0.0;\n brake_value_ = 0.0;\n acc_driving_mode_ = 0;\n brake_driving_mode_ = 0;\n epb_status_ = false;\n epb_driving_mode_ = 0;\n }\n ~ControllerOutput() = default;\n\n //ControllerOutputAlarm controller_ouput_alarm_;\n\n ///转向角度\n double steering_angle_;\n ///转向角速度\n double steering_angle_speed_;\n ///转向工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t steering_driving_mode_;\n ///节气门输出\n double acc_value_;\n ///纵向控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t acc_driving_mode_;\n ///刹车值\n double brake_value_;\n///纵向控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t brake_driving_mode_;\n ///EPB状态\n bool epb_status_;\n ///EPB控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t epb_driving_mode_;\n};\n}\n}\n\n#endif // CONTROLLER_OUTPUT_H_\n"
},
{
"alpha_fraction": 0.5302526354789734,
"alphanum_fraction": 0.5377880930900574,
"avg_line_length": 20.13348960876465,
"blob_id": "0460f3dd44cd56ccf9c24a9e76bc09451c318d1d",
"content_id": "24c7bb6ed0b71751251913ae8cb2b8ac932a43f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 12124,
"license_type": "no_license",
"max_line_length": 144,
"num_lines": 427,
"path": "/athena/core/x86/Planning/include/collision_check/collision_check.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 该库提供了一些路面信息,障碍物信息和碰撞检测算法。\n */\n\n#ifndef _COLLISION_CHECK_H\n#define _COLLISION_CHECK_H\n\n\n#pragma once\n\n#include <math.h>\n#include <iostream>\n\n#include <sys/sem.h>\n#include <vector>\n\n#include \"common/navi_point.h\"\n#include \"common/path.h\"\n\n//#include \"ecu.h\"\n\n#include \"common/point.h\"\n#include \"common/rect.h\"\n//#include \"origin_vehicle.h\"\n\n#define PATCHS_WIDTH 20 ///<建立网格,横向20个格子\n#define PATCHS_LENGTH 20 ///<建立网格,纵向20个格子\n#define PATCHS_WIDTH_PER_M 0.6 ///<建立网格,每个格子的边长,0.6米\n\n\n/**\n * @class patch\n * @brief 该类表示对地面区域建立的格网。\n */\nclass patch\n{\npublic:\n ///一个格网的代价值,表示通过地形的代价,越小越好。\n int value;\n ///一个格网里包含多少个点\n int num;\n ///一个格网内所有点高程得的总值\n float total;\n ///一个格网内所有点高程得的平均值\n float average;\n ///一个格网内所有点高程得的方差\n float variance;\n ///一个格网内四个角上的坐标。构成矩阵\n rect rt;\n ///参考轨迹上的投影起点 和终点的位置。\n float s1, s2;\n ///参考轨迹上的偏移宽度的距离\n float l1, l2;\n ///记录了投影到当前格网中的左右的点的信息\n vector<point> patch_points;\n\npublic:\n /**\n * @brief 构造函数\n */\n patch()\n {\n value = 0;\n num = 0;\n total = 0;\n average = 0;\n variance = 0;\n s1 = s2 = 0;\n l1 = l2 = 0;\n patch_points.clear();\n }\n\n /**\n * @brief 析构函数\n */\n ~patch()\n {\n\n }\n\n /**\n * @brief 判断一个点是否落在该网格内。\n * @param p 输入量:点的信息。\n * @return 1表示点落在该网格内,0表示没有。。\n */\n bool point_in_patch(point p);\n};\n\n/**\n * @class ObstacleInfo\n * @brief 传感器输出的一个障碍物信息。\n */\nclass ObstacleInfo\n{\npublic:\n /**\n * @brief 构造函数\n */\n ObstacleInfo()\n {\n type_ = 0;\n id_ = 0;\n x_ = 0.0;\n y_ = 0.0;\n width_ = 0.0;\n height_ = 0.0;\n yaw_ = 0.0;\n speed_ = 0.0;\n gps_time_ = 0.0;\n }\n\n /**\n * @brief 析构函数\n */\n ~ObstacleInfo()\n {\n\n }\npublic:\n int32_t type_; ///<障碍物类型\n\n std::string source_; ///<障碍物来源\n\n int32_t id_; ///<障碍物id\n\n double x_; ///<障碍物x坐标\n\n double y_; ///<障碍物y坐标\n\n double width_; ///<障碍物宽度\n\n double height_; ///<障碍物长度\n\n double yaw_; ///<障碍物运动方向\n\n double speed_; ///<障碍物运动速度\n\n double gps_time_; ///<GPS时间\n\n};\n\n/**\n * @class 
SensorObstacles\n * @brief 传感器输出的所有障碍物。\n */\nclass SensorObstacles\n{\npublic:\n /**\n * @brief 构造函数\n */\n SensorObstacles()\n {\n num_of_obstacle_ =0;\n obstacles_info_.clear();\n }\n\n /**\n * @brief 析构函数\n */\n ~SensorObstacles()\n {\n\n }\npublic:\n int16_t num_of_obstacle_; ///<障碍物个数\n\n std::vector< ObstacleInfo > obstacles_info_; ///<障碍物列表\n\n};\n\n/**\n * @class ObstacleRect\n * @brief 障碍物类,表示一个障碍物的各种属性。\n */\nclass ObstacleRect\n{\npublic:\n int value_; ///<与该障碍物碰撞的碰撞代价。\n string source_; ///<障碍物信息来源,lidar,radar,camera...\n rect rt_; ///<障碍物包围盒,四个角上的坐标。\n float v_; ///<障碍物运动速度\n float yaw_; ///<障碍物运动方向\n int type_; ///<障碍物类型。\n\n vector<navi_point> pos_seq_; ///<预测障碍物中心点未来运动的轨迹序列\n vector<rect> rt_seq_; ///<预测障碍物包围盒未来运动的轨迹序列\n\n /**\n * @brief 构造函数\n */\n ObstacleRect()\n {\n value_ =0;\n v_=0;\n yaw_=0;\n }\n\n /**\n * @brief 析构函数\n */\n ~ObstacleRect()\n {\n // pos_seq.clear();\n // rt_seq.clear();\n }\n\n /**\n * @brief 对等于号=进行重载,。ObstacleRect类的等号操作\n * @param src 输入量:原始障碍物信息。\n * @return 被赋值的障碍物信息。。\n */\n ObstacleRect& operator= (const ObstacleRect& src)\n {\n this->value_ = src.value_;\n this->v_ = src.v_;\n this->yaw_ = src.yaw_;\n this->rt_ = src.rt_;\n this->type_ = src.type_;\n\n return *this;\n }\n\n};\n\n\n/**\n * @class RoadSurface\n * @brief 路面情况类,路的参考中心线。路面格网,路上的障碍物等信息。。\n */\nclass RoadSurface\n{\npublic:\n ///当前GPS时间\n double gps_time_;\n ///参考中心线。\n path road_path_;\n ///路面格网。静态障碍物信息会投影到格网中。\n patch patchs_[PATCHS_WIDTH][PATCHS_LENGTH];\n ///障碍物信息,包括动态障碍物和静态障碍物。\n vector<ObstacleRect> obstacles_;\npublic:\n /**\n * @brief 构造函数\n */\n RoadSurface();\n /**\n * @brief 析构函数\n */\n ~RoadSurface();\n\n /**\n * @brief 初始化网格,在参考路径上的起始点和终止点直接生成网格。\n * @param start 输入量:参考路径上的起始点。\n * @param end 输入量:参考路径上的终止点。。\n */\n void init(int start, int end);\n\n /**\n * @brief 给道路中心线road_path赋值。\n * @param ref_ 输入量:要输入的参考中心线。\n */\n void set_ref_path(vector<navi_point> ref_);\n};\n\n/**\n * @class CollisionCheck\n * @brief 碰撞检测类,判断本车是否与障碍物有碰撞的可能。\n 
*/\nclass CollisionCheck\n{\nprivate:\n double car_width_; ///<本车长度\n double car_length_; ///<本车宽度\n double safe_width_; ///<本车与障碍物之间的安全保护距离\n\npublic:\n /**\n * @brief 构造函数\n */\n CollisionCheck()\n {\n car_width_ = 0.0;\n car_length_ = 0.0;\n safe_width_ = 0.0;\n }\n /**\n * @brief 析构函数\n */\n ~CollisionCheck()\n {\n\n }\n\n /**\n * @brief 设置车辆等信息\n * @param m_car_width 输入量:车辆宽度。\n * @param m_car_length 输入量:车辆长度。\n * @param m_safe_width 输入量:本车与障碍物之间的安全保护距离。\n */\n int set_car_data( double m_car_width, double m_car_length, double m_safe_width );\n\n /**\n * @brief 计算车辆的外形和矩形位置。考虑安全保护距离。\n * @param rt_car 输出量:车辆矩形框。\n * @param x 输入量:车辆x坐标。\n * @param y 输入量:车辆y坐标。\n * @param heading 输入量:车辆航向角。\n */\n void car_trajectory_collision_check_pos_rect(\n rect& rt_car,\n double x,\n double y,\n double heading);\n\n /**\n * @brief 计算车辆的外形和矩形位置。考虑安全保护距离。\n * @param rt_car 输出量:车辆矩形框。\n * @param x 输入量:车辆x坐标。\n * @param y 输入量:车辆y坐标。\n * @param heading 输入量:车辆航向角。\n */\n void car_lane_collision_check_rect( rect& rt_car, double x, double y, double heading );\n\n /**\n * @brief 计算车辆的外形和矩形位置。考虑安全保护距离。\n * @param rt_car 输出量:车辆矩形框。\n * @param x 输入量:车辆x坐标。\n * @param y 输入量:车辆y坐标。\n * @param heading 输入量:车辆航向角。\n * @param safe_width 输入量:安全保护距离。\n */\n void car_lane_collision_check_rect( rect& rt_car, double x, double y, double heading, double safe_width );\n\n /**\n * @brief 构建一个矩形框。\n * @param x 输入量:矩形框中心x坐标。\n * @param y 输入量:矩形框中心y坐标。\n * @param heading 输入量:矩形框方向。\n * @param width 输入量:矩形框宽度。\n * @param length 输入量:矩形框长度。\n * @param rt_ 输出量:矩形框。\n */\n void construct_rect( double x, double y, double heading, double width, double length, rect& rt );\n\n /**\n * @brief 判断规划产生的虚拟轨迹与障碍物是否有碰撞。\n * @param lane 输入量:规划产生的虚拟轨迹。\n * @param obstacles 输入量:路面障碍物信息。\n * @param car_speed 输入量:本车速度。\n * @param net_speed 输入量:地图限速。\n * @param st_pos 输入量:车当前位置在规划轨迹上的投影点。\n * @param check_length 输入量:车前方的想要做碰撞检测的长度。\n * @param free_length 输出量:如果有碰撞,输出碰撞距离,否则输出-1。\n * @param motion_speed 
输出量:根据前车速度、与前车距离以及地图限速计算的规划推荐速度。\n * @param motion_length 输出量:根据前车速度、与前车距离以及地图限速计算的多少米后达到规划推荐速度。\n * @return 返回1表示有碰撞,0表示没有。\n */\n int virtual_lane_col_check( path& lane, vector<ObstacleRect> obstacles, double car_speed, double net_speed, int st_pos, double check_length,\n double& free_length, double& motion_speed, double& motion_length );\n\n /**\n * @brief 判断某个车道与障碍物是否有碰撞。\n * @param lane 输入量:车道中心线。\n * @param obstacles 输入量:路面障碍物信息。\n * @param car_speed 输入量:本车速度。\n * @param st_pos 输入量:车当前位置在规划轨迹上的投影点。\n * @param check_length 输入量:车前方的想要做碰撞检测的长度。\n * @return 返回碰撞距离。\n */\n double lane_col_check( path& lane, vector<ObstacleRect> obstacles, double car_speed, int st_pos, double check_length );\n\n /**\n * @brief 判断某个车道与障碍物是否有碰撞。\n * @param lane 输入量:车道中心线。\n * @param obstacles 输入量:路面障碍物信息。\n * @param start_num 输入量:碰撞检测的起始位置,由地图匹配获得。\n * @param wheel_base_length 输入量:车轴长。\n * @param step 输入量:搜索步长。\n * @param safe_dis 输入量:安全保护距离长。\n * @return 返回1表示有碰撞,0表示没有。\n */\n int park_col_check( path& lane, vector<ObstacleRect> obstacles, int start_num, double wheel_base_length, int step, double safe_dis );\n\n#if 0\n /**\n * @brief 判断两个点在时间维度上是否要交叠。\n * @param p 输入量:第一个点。\n * @param op 输入量:第二个点。\n * @return 返回true表示有交叠,false表示没有。。\n */\n bool cross_in_two_time(navi_point& p, navi_point& op);\n\n /**\n * @brief 判断两个点在时间维度上是否要交叠。\n * @param p 输入量:第一个点。\n * @param op 输入量:第二个点。\n * @return 返回true表示有交叠,false表示没有。。\n */\n bool cross_in_two_time_seq( navi_point& p, obstacle& obj, rect& rt_car );\n\n bool lane_collision_check(path& lane,\n RoadSurface& road,\n double speed,\n int st_pos,\n int en_pos,\n double check_length,\n int& free_length_num,\n double& free_length,\n double& obj_speed,\n double& obj_heading);\n\n bool lane_collision_check_moving_objects(\n path& lane,\n RoadSurface& road,\n int st_pos,\n int en_pos,\n double check_length,\n int& free_length_num,\n double& free_length);\n#endif // 0\n};\n\n\n#endif // _COLLISION_CHECK_H\n"
},
{
"alpha_fraction": 0.7502626776695251,
"alphanum_fraction": 0.7577933669090271,
"avg_line_length": 35.36942672729492,
"blob_id": "f437fd377ce6b48a0b7aacd95ccd2b69cfe0219f",
"content_id": "7144b06485b91f0408c4d5b3633b8eeff6eb98da",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5726,
"license_type": "no_license",
"max_line_length": 212,
"num_lines": 157,
"path": "/athena/core/x86/Camera/lane_detect/include/main_proc.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#ifndef\t_MAIN_H_\n#define\t_MAIN_H_\n\n#include \"LaneDetector.h\"\n#include \"LaneDraw.h\"\n#include \"utils/imrgb.h\"\n#include <fstream>\n#include \"utils/config2.h\"\n\n//lcm\n#include \"lane_lcm/line_info.hpp\"\n#include \"lane_lcm/ins_info.hpp\"\n#include \"sensor_lcm/cam_obj_list.hpp\"\n#include \"utils/globalVal.h\"\n\nusing namespace std;\nusing namespace cv;\n\nstatic cv::Mat Lane_output(INPUT_IMAGE_HEIGHT, INPUT_IMAGE_WIDTH, CV_8UC3);\n\nvoid preDetect(cv::Mat& srcImage);\nvoid initConfig(const char* config_file);\ncv::Mat getOutImage();\nvoid getPreLaneInfo(int *lane_quality, vector<point> &mLeftPoints, vector<point> &mRightPoints);\nvoid getPreLaneImageInfo(vector<point> &mLeftImagePoints, vector<point> &mRightImagePoints);\n\n//////////////////////////////////////////////////////////////////////////\nint main_read_image_from_local(double &start_fps_time, double &end_fps_time, Handler &handler);\n#if 1\nstatic IplImage *Im_road_=cvCreateImage(cvSize(ROAD_IMAGE_WIDTH, ROAD_IMAGE_HEIGHT),8,3);\nstatic IplImage *Im_output_=cvCreateImage(cvSize(INPUT_IMAGE_WIDTH,INPUT_IMAGE_HEIGHT),8,3);\nstatic IplImage* frame= cvCreateImage(cvSize(INPUT_IMAGE_WIDTH,INPUT_IMAGE_HEIGHT),IPL_DEPTH_8U,IPL_BORDER_WRAP);\nstatic IplImage *pInputImage=cvCreateImage(cvSize(INPUT_IMAGE_WIDTH,INPUT_IMAGE_HEIGHT),8,1);\nstatic float roadwidth,scale,lateral_offset=0.0,roadwidth_cur,road_scale,roadwidth_pre;\nstatic int Open_RoadImage;\nstatic std::ofstream fout1(\"../Camera/LaneLog.txt\");\nstatic IplImage *Pout=cvCreateImage(cvSize(INPUT_IMAGE_WIDTH,INPUT_IMAGE_HEIGHT),8,3);\nstatic cv::Mat Sign1(INPUT_IMAGE_WIDTH,INPUT_IMAGE_HEIGHT,3);\nstatic cv::Mat Sign2(INPUT_IMAGE_WIDTH,INPUT_IMAGE_HEIGHT,3);\nstatic FILE*\tfp = NULL;\n#define DATA_SIZE 200\n//static FILE* flp=NULL;\n#endif\n\nextern void\tcloseParameterLog(void);\nextern void openParameterLog(void);\nextern void toggleSaveLB_PARAMETER(void);\nextern void openLaneBoundaryLog(void);\n\nextern double 
getDisplayScale(void);\nextern void setPointOnInputImage(int iIsrc, int iJsrc);\nextern int getIsrcOnInputImage(void);\nextern int getJsrcOnInputImage(void);\n\nextern void upIsrcOnInputImage(void);\nextern void downIsrcOnInputImage(void);\nextern void leftJsrcOnInputImage(void);\nextern void rightJsrcOnInputImage(void);\nextern void upVmarginOnInputImage(void);\nextern void downVmarginOnInputImage(void);\nextern int getVmarginOnInputImage(void);\nextern int getInputImageHeight(void);\nextern int getInputImageWidth(void);\nextern int getOutputImageHeight(void);\nextern int getOutputImageWidth(void);\nextern IMRGB *getImrgbOutput(void);\nextern IMRGB *getImrgbInput(void);\nextern double getVelocityOfVehicle(void);\nextern double getFrontSteerAngle(void);\nextern double getRearSteerAngle(void);\nextern int getHorizontalLineNo(void);\nextern void setHorizontalLineNo(int v);\nextern void setHorizontalLineNoWithScaling(int v);\nextern void upHorizontalLineNo(void);\nextern void downHorizontalLineNo(void);\nextern int getVerticalLineNo(void);\nextern void setVerticalLineNo(int v);\nextern void setVerticalLineNoWithScaling(int v);\nextern void toLeftVerticalLineNo(void);\nextern void toRightVerticalLineNo(void);\n\n//////////////////////////////////////////////////////////////////////////\nextern IMRGB * getInputImage(IplImage *inputimage);\n//////////////////////////////////////////////////////////////////////////\nextern IMRGB * getInputImage(void);\n\nextern int getFrameSkip(void);\nextern void setFrameSkip(int v);\nstatic void makeOutputImage(void);\nstatic void makeOutputRoadImage(IMRGB *pImrgb);\nstatic BOOL remakeOutputImage(void);\nstatic BOOL remakeAllImage(void);\n//static BOOL mainProc(void);\n\nextern DB getVehicleSpeed(void);\n\n//static void clear_imrgb_out(void);\n//extern char *getOutputAVIFilePath(void);\nstatic void deleteLaneDetector(void);\nextern int create3DViewWindow(void);\nextern int proc3DViewWindow(void);\nextern void make3DImage(LaneDetector 
*pLaneDetector);\nstatic void clearImrgbOutput(void);\n\nvoid calRoadLinePoints(int iTopIroadLeft, int iTopIroadRight, int iTopJroadLeft, int iTopJroadRight, int iBottomIroadLeft, int iBottomIroadRight, int iBottomJroadLeft, int iBottomJroadRight);\nvoid publish_line_info_to_control(vector<obu_camera::line_point> &left_points, vector<obu_camera::line_point> &right_points, vector<obu_camera::line_point> ¢er_points, double lane_width, int judge_lane_flag);\nvoid publish_line_info_to_control(vector<obu_camera::line_point> &left_points, vector<obu_camera::line_point> &right_points, int judge_lane_flag);\nvoid readVideo(string path);\nvoid on_Trackbar(int, void*);\nvoid ImageText(Mat* img, const char* text, int x, int y);\n\n\nstatic LaneParameter g_LaneParameterPrevious;\nstatic LaneParameter *getLaneParameterPrevious(void)\n{\n return &g_LaneParameterPrevious;\t //之前的data也非常有用\n}\nstatic LaneDetector *g_pLaneDetector = NULL;\nstatic LaneDetector *getLaneDetector(void)\n{\n return g_pLaneDetector;\n}\n\nstatic void deleteLaneDetector(void)\n{\n SAFE_DELETE(g_pLaneDetector);\n}\n\n#define\tDUMMY_DATA\t(-9999)\n\n#define\tMAX_DISTANCCE_NUM\t(20 + 1)\nstatic double g_adDistanceOfHorizontalLine[MAX_DISTANCCE_NUM] = { -1\t};\nstatic int g_aiPosOfHorizontalLine[MAX_DISTANCCE_NUM];\nstatic int g_iNumOfHorizontalLine = 0;\nstatic int getNumOfHorizontalLine(void)\n{\n return g_iNumOfHorizontalLine;\n}\n\nstatic double getDistanceOfHorizontalLine(int iIdx)\n{\n if(iIdx < 0)\treturn -1;\n if(iIdx >= getNumOfHorizontalLine())\treturn -1;\n return g_adDistanceOfHorizontalLine[iIdx];\n}\n\nstatic int getPosOfHorizontalLine(int iIdx)\n{\n if(iIdx < 0)\treturn -1;\n if(iIdx >= getNumOfHorizontalLine())\treturn -1;\n return g_aiPosOfHorizontalLine[iIdx];\n}\n\nstatic int g_aiSelectedDistance[MAX_DISTANCCE_NUM] = {-1};\nstatic int g_iSelectedDistanceNum = 0;\n\n#endif\t//_MAIN_H_\n"
},
{
"alpha_fraction": 0.4738593101501465,
"alphanum_fraction": 0.5380228161811829,
"avg_line_length": 40.235294342041016,
"blob_id": "2c3a75dbda43c564b88b033dda0a5ca8fcfddec3",
"content_id": "ca91b9df8b6a05c2f0d03be108a8e44cdfb7c917",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2158,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 51,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/matutil-d.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "SHIFT_JIS",
"text": "///////////////////////////////////////////////////////////////////////////////\n//\tfile:\tmatutil-d.h\n// author: krutch_zhou\n// email:[email protected]\n//\t$modified: 2015/06/23\n//\t$Log: matutil-d.h,v $\n///////////////////////////////////////////////////////////////////////////////\n/******************************************/\n/***\t 行列計算用ヘッダファイル\t\t***/\n/******************************************/\n#include \"config.h\"\n\n/* sslib.h include file for sslib */\n#ifndef _MATUTIL_D_H_\n#define _MATUTIL_D_H_\n\n#include <math.h>\n#ifndef\tFOR_WTP\n#include \"type.h\"\n#endif\tFOR_WTP\n\n\n/******************************************/\n/***\t\t使用関数プロトタイプ宣言\t\t***/\n/******************************************/\n/* matrix.c */\nVD dmadd(DB a[], DB b[], DB c[], S4 m, S4 n);\nVD dmsub(DB a[], DB b[], DB c[], S4 m, S4 n);\nVD dmmul1(DB a[], DB b[], S4 m);\nVD dmmul2(DB a[], DB b[], DB c[], S4 m, S4 n, S4 k);\nVD dmtra1(DB a[], S4 m, S4 n);\nVD dmtra2(DB a[], DB b[], S4 m, S4 n);\nDB dminver(DB a[], DB b[], S4 l, S4 m, DB eps);\nVD dmmove(DB a[], DB b[], S4 m, S4 n);\nVD dmswap(DB a[], DB b[], S4 m, S4 n);\n\n// V added by watanabe at 20090914\nextern BOOL dminver2(DB a[], DB b[], S4 l, S4 m);\n// A added by watanabe at 20090914\n// V added by watanabe at 20090918\nextern VD dmUDdecomposition(DB *dbS, DB *dbU, DB *dbD, S4 s4_Dim);\n// A added by watanabe at 20090918\n// V added by watanabe at 20090923\nextern VD dmColumnVector(DB *adbSrc, S4 s4_height, S4 s4_width, DB *adbDst, S4 s4_column);\nextern VD dmRawVector(DB *adbSrc, S4 s4_height, S4 s4_width, DB *adbDst, S4 s4_raw);\nextern VD dmPutSubMatrix(DB *adbDst, S4 s4_DstHeight, S4 s4_DstWidth, DB *adbSub, S4 s4_SubHeight, S4 s4_SubWidth, S4 s4_raw, S4 s4_column);\nextern VD dmSetZero(DB *adbDst, S4 s4_height, S4 s4_width);\nextern VD dmAddSubMatrix(DB *adbDst, S4 s4_DstHeight, S4 s4_DstWidth, DB *adbSub, S4 s4_SubHeight, S4 s4_SubWidth, S4 s4_raw, S4 s4_column);\nextern VD dmSubSubMatrix(DB *adbDst, S4 
s4_DstHeight, S4 s4_DstWidth, DB *adbSub, S4 s4_SubHeight, S4 s4_SubWidth, S4 s4_raw, S4 s4_column);\n// A added by watanabe at 20090923\n#endif\n\n"
},
{
"alpha_fraction": 0.4719737768173218,
"alphanum_fraction": 0.480322003364563,
"avg_line_length": 29.216217041015625,
"blob_id": "8b3008632f0b316243aa8ed22207141cc482b3ff",
"content_id": "bb977e3759d3333da7679dc658a85ea2225fba41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3808,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 111,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/msg/ne_msg/ne_msg_t.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef __ne_msg_t_hpp__\n#define __ne_msg_t_hpp__\n\n//#include \"nad_base.h\"\n#include \"ne_msg_base_t.hpp\"\n\n#include \"config/nad_config.h\"\n\n//目前暂定默认消息版本号是1\n#define MSG_VER_1 1\n\nnamespace nad_lcm\n{\n\n//ne_msg_t仅用于网元间通讯,使用publish_nemsg发消息,不能直接使用ne_msg_base_t通讯!!\n//网元内通讯lcm原生的方法publsh,不要使用ne_msg_t\ntemplate <class T>\nclass ne_msg_t : public ne_msg_base_t\n{\npublic:\n //消息体,如:fam/msg/nad_msg_co中的消息\n T body;\n\npublic:\n //构造函数\n ne_msg_t() : ne_msg_base_t()\n {\n //header.version = MSG_VER_1;\n header.version = 1; //暂未使用\n header.peer_ne_name = \"\"; //接受者的网元名,如obu_name、rsu_name、csu_name\n header.peer_channel = \"\"; //对方的信道名,默认与消息名相同,如cr_info_report\n header.local_ne_name = \"\"; //默认使用全局配置中的本网元名称\n header.local_channel = \"\"; //暂未使用\n header.session_name = \"\"; //暂未使用\n header.type = 0; //暂未使用\n header.time = 0; //暂未使用\n header.opt = 0; //暂未使用\n }\n\n //构造函数\n ne_msg_t(std::string peer_ne_name, std::string peer_channel) : ne_msg_base_t()\n {\n header.version = MSG_VER_1; //暂未使用\n header.peer_ne_name = peer_ne_name; //接受者的网元名,如obu_name、rsu_name、csu_name\n header.peer_channel = peer_channel; //对方的信道名,默认与消息名相同,如cr_info_report\n header.local_ne_name = \"\"; //默认使用全局配置中的本网元名称\n header.local_channel = \"\"; //暂未使用\n header.session_name = \"\"; //暂未使用\n header.type = 0; //暂未使用\n header.time = 0; //暂未使用\n header.opt = 0; //暂未使用\n }\n\n //把ne_msg_t.body数据写入ne_msg_t.data\n inline void encode_body()\n {\n //body->data\n int len = body.getEncodedSize();\n char buf2[len];\n data_len = body.encode(buf2, 0, len);\n if (data_len != len)\n {\n std::cout << \"nad_msg::encode error!\" << std::endl;\n }\n data.assign(buf2, buf2 + len);\n }\n\n //从消息流读数据到ne_msg_t\n inline int decode(const void *buf, int offset, int maxlen)\n {\n //调用父类的decode\n int pos = ne_msg_base_t::decode(buf, offset, maxlen);\n if (pos <= 0)\n {\n return pos;\n }\n\n //data->body\n int len = 0;\n std::string buf2(data.begin(), data.end());\n\n len = body.decode((void *)buf2.c_str(), 0, 
data_len);\n if (len < 0)\n {\n std::cout << \"ne_msg_t::decode chan=\" << header.peer_channel << std::endl;\n std::cout << \"ne_msg_t::decode name=\" << header.local_ne_name<< std::endl;\n std::cout << \"nad_msg::decode error!\" << std::endl;\n }\n\n return pos;\n }\n\n //从消息流读数据到ne_msg_t\n inline void decode_body(T &msg_body) const\n {\n //data->body\n int len = 0;\n std::string buf2(data.begin(), data.end());\n len = msg_body.decode((void *)buf2.c_str(), 0, data_len);\n if (len < 0)\n {\n std::cout << \"nad_msg::decode error!\" << std::endl;\n std::cout << \"ne_msg_t body::decode chan=\" << header.peer_channel << std::endl;\n }\n }\n\n};\n\n}\n\n#endif // __ne_msg_t_hpp__\n"
},
{
"alpha_fraction": 0.5367521643638611,
"alphanum_fraction": 0.5615384578704834,
"avg_line_length": 17.571428298950195,
"blob_id": "cf2b8a2f2ee03f7d853c092902b49d600fcec166",
"content_id": "9594f286def446deb09e4537f88ce8d79c5b9365",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3696,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 126,
"path": "/athena/core/x86/Planning/include/common/point.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 表示一个点的属性和相关计算。\n */\n\n#pragma once\n\n#include <math.h>\n#include <vector>\n\n/**\n * @class point\n * @brief 点属性类。\n */\nclass point\n{\npublic:\n /// 传感器相对坐标\n float x;\n float y;\n float z;\n\n /// 绝对坐标\n float xx;\n float yy;\n float zz;\n\npublic:\n /**\n * @brief 构造函数\n */\n point()\n {\n x=y=z=0;\n xx=yy=zz=0;\n }\n /**\n * @brief 析构函数\n */\n ~point()\n {\n\n }\n\n /**\n * @brief 绝对坐标系下,判断该点是否在p1,p2,p3,p4构成的格子内。\n * @param p1 输入量:第一个点的坐标(绝对坐标系坐标)。\n * @param p2 输入量:第二个点的坐标(绝对坐标系坐标)。\n * @param p3 输入量:第三个点的坐标(绝对坐标系坐标)。\n * @param p4 输入量:第四个点的坐标(绝对坐标系坐标)。\n * @return 1表示点落在网格内,0表示没有。。\n */\n bool is_in_rect(point& p1, point& p2,\n point& p3, point& p4);\n\n /**\n * @brief 传感器相对坐标系下,判断该点是否在p1,p2,p3,p4构成的格子内。\n * @param p1 输入量:第一个点的坐标(传感器相对坐标系坐标)。\n * @param p2 输入量:第二个点的坐标(传感器相对坐标系坐标)。\n * @param p3 输入量:第三个点的坐标(传感器相对坐标系坐标)。\n * @param p4 输入量:第四个点的坐标(传感器相对坐标系坐标)。\n * @return 1表示点落在网格内,0表示没有。。\n */\n bool is_in_rect_xx(point& p1, point& p2,\n point& p3, point& p4);\n\n /**\n * @brief 对等于号=进行重载,point类的等号操作符。\n * @param src 输入量:原始输入点。\n * @return 被赋值的点信息。。\n */\n point& operator= (const point& src)\n {\n this->x = src.x;\n this->y = src.y;\n this->z = src.z;\n\n this->xx = src.xx;\n this->yy = src.yy;\n this->zz = src.zz;\n\n return *this;\n }\n};\n\n/**\n* @brief 判断两点是否相同。\n* @param p1 输入量:第一个点的坐标(绝对坐标系坐标)。\n* @param p2 输入量:第二个点的坐标(绝对坐标系坐标)。\n* @return 返回1表示相同,0表示不同。\n*/\nbool samep(point p1, point p2);\n\n/**\n* @brief 计算两点之间的距离。\n* @param p1 输入量:第一个点的坐标(绝对坐标系坐标)。\n* @param p2 输入量:第二个点的坐标(绝对坐标系坐标)。\n* @return 返回两点之间的距离。\n*/\ndouble dist(point p1,point p2);\n\n/**\n* @brief 已知两点,求连线中点坐标。\n* @param p1 输入量:第一个点的坐标(绝对坐标系坐标)。\n* @param p2 输入量:第二个点的坐标(绝对坐标系坐标)。\n* @return 返回两点连线中点坐标。\n*/\npoint midpoint(point p1, point p2);\n\n/**\n* @brief 绝对坐标系下,叉乘计算 |p1 p2| X |p1 p|。\n* @param p1 输入量:点的坐标(绝对坐标系坐标)。\n* @param p2 输入量:点的坐标(绝对坐标系坐标)。\n* @param p 输入量:点的坐标(绝对坐标系坐标)。\n* @return 返回叉乘结果。\n*/\nfloat get_cross(point& p1, point& p2, point& 
p);\n/**\n* @brief 传感器相对坐标系下,叉乘计算 |p1 p2| X |p1 p|。\n* @param p1 输入量:点的坐标(传感器相对坐标系坐标)。\n* @param p2 输入量:点的坐标(传感器相对坐标系坐标)。\n* @param p 输入量:点的坐标(传感器相对坐标系坐标)。\n* @return 返回叉乘结果。\n*/\nfloat get_cross_xx(point& p1, point& p2, point& p);\n"
},
{
"alpha_fraction": 0.5370942950248718,
"alphanum_fraction": 0.5471406579017639,
"avg_line_length": 20.93220329284668,
"blob_id": "e5f4fb05f1e8cf2293ef339566d82d6770f211fe",
"content_id": "57f4a7eb78d123fda4cbbbdef5309531d0eb39ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1336,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 59,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/msg/zmq/zmq_t.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:zmq_t.h\n * 创建者:王飞虎\n * 时 间:2016-08-12\n * 描 述:zmq的公共类\n-------------------------------------------------------*/\n\n#ifndef _ZMQ_T_H_\n#define _ZMQ_T_H_\n\n\nextern \"C\"{\n#include \"lcm/lcm_internal.h\"\n}\n#include \"lcm/udpm_util.h\"\n#include \"lcm/lcm-cpp.hpp\"\n#include \"zmq.hpp\"\n\n\n#include \"ne_msg/ne_msg_base_t.hpp\"\n#include \"ne_msg/ne_msg_t.hpp\"\n\n\nclass zmq_t\n{\npublic:\n zmq::context_t _context;\n\npublic:\n pthread_spinlock_t spinlock_zmq;\n\npublic:\n zmq_t();\n virtual ~zmq_t();\n\n void dispatch_handlers(nad_lcm::ne_msg_base_t &ne_msg);\n\n virtual void communicate_send(const lcm::ReceiveBuffer* rbuf, const std::string& chan,\n const nad_lcm::ne_msg_base_t* ne_msg) = 0;\n\n template<typename T>\n inline void send_nemsg(nad_lcm::ne_msg_t<T> &ne_msg)\n {\n ne_msg.encode_body();\n uint32_t len = ne_msg.getEncodedSize();\n uint8_t buf[len];\n ne_msg.encode(buf, 0, len);\n nad_lcm::ne_msg_base_t *ne_base_msg = static_cast<nad_lcm::ne_msg_base_t *>(&ne_msg);\n\n lcm::ReceiveBuffer rbuf;\n rbuf.data = buf;\n rbuf.data_size = len;\n rbuf.recv_utime = lcm_timestamp_now();\n\n communicate_send(&rbuf, \"NEMSG_SKIP_LCM\", ne_base_msg);\n }\n};\n\n#endif\n"
},
{
"alpha_fraction": 0.5371828675270081,
"alphanum_fraction": 0.5468066334724426,
"avg_line_length": 15.808823585510254,
"blob_id": "2459c89f7368c0b99694795f94b113193caf60ce",
"content_id": "6906903ba86e5f5232f0cfde04398f33758a135e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1269,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 68,
"path": "/athena/core/arm/Control/include/common/map_matching/localization_.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file localization.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef LOCALIZATION__H_\n#define LOCALIZATION__H_\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class Localization\n *\n * @brief Location information.\n */\nclass Localization_\n{\n// public:\n// Localization_() = default;\n// ~Localization_() = default;\n// ///GPS时间\n// double gps_time_;\n// ///周\n// int week_;\n// ///经度\n// double lat_;\n// ///纬度\n// double lon_;\n// ///海拔\n// double height_;\n// ///横向速度\n// double lateral_speed_;\n// ///纵向速度\n// double longitudinal_speed_;\n// ///地向速度\n// double down_speed_;\n// ///横滚角度\n// double roll_;\n// ///俯仰角度\n// double pitch_;\n// ///航向角度\n// double heading_;\n// ///横向加速度\n// double lateral_accelerate_;\n// ///纵向加速度\n// double longitudinal_accelerate_;\n// ///地向加速度\n// double down_accelerate_;\n// ///横滚角速度\n// double roll_speed_;\n// ///俯仰角速度\n// double pitch_speed_;\n// ///航向角速度\n// double heading_speed_;\n};\n}\n}\n\n#endif // LOCALIZATION_H_\n"
},
{
"alpha_fraction": 0.5379229784011841,
"alphanum_fraction": 0.5705950856208801,
"avg_line_length": 25.78125,
"blob_id": "b6826184407c92298e175db7e1bdc92d2ac8ecf0",
"content_id": "5b94beabbdc804a143d884057bed79829892ac62",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1115,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 32,
"path": "/athena/core/arm/Common/include/oam/task/nad_task_func.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_task_func.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:二期脚本中的函数\n-------------------------------------------------------*/\n#ifndef _NAD_TASK_FUNC_H\n#define _NAD_TASK_FUNC_H\n\n#include \"nad_function.h\"\n//一个脚本自定义函数\nclass nad_task_func\n{\npublic:\n string name; //函数名\n map<string, string> paras; //参数表,<参数名, 参数值>\n\n //获得参数值\n bool has_para(const char *name);\n const int64_t get_para(const char *name, const int64_t default_value);\n const char *get_para(const char *name, const char *default_value);\n const double get_para(const char *name, const double default_value);\n\n //保存到消息中,格式是多行文本,分别保存:函数名,参数名1,参数值1,参数名2,参数值2…\n string save_to_msg();\n\n //从消息中加载,格式是多行文本,分别保存:函数名,参数名1,参数值1,参数名2,参数值2…\n void load_from_msg(const string &task_func);\n};\n\n\n#endif\n"
},
{
"alpha_fraction": 0.5944827795028687,
"alphanum_fraction": 0.6041379570960999,
"avg_line_length": 16.682926177978516,
"blob_id": "ab4b6a3ad71857f95ec73a4cd3555ea2326afe60",
"content_id": "1e4e7e1f0d2a41a2f97ce5f9d69cc32326bb3fbc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 941,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 41,
"path": "/athena/cc/camera/lane_detect/lane_utils.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <opencv/cv.h>\n#include <opencv/highgui.h>\n#include <opencv2/highgui/highgui.hpp>\n#include \"opencv2/imgproc/imgproc.hpp\"\n#include \"opencv2/core/core.hpp\"\n#include <vector>\n\nstruct point\n{\n int x,y;\n};\n\n/** @brief 设置需要识别车道线的图片\n *\n * @param srcImage 输入图片\n * @return\n *\n */\nvoid setImage(cv::Mat& srcImage);\n\n/** @brief 初始化相机参数\n *\n */\nvoid init();\n\n\n/** @brief 获取识别车道线后的图片\n *\n * @return 带车道线识别的图片\n *\n */\ncv::Mat getResultImage();\n\n\n/** @brief 获取车道线质量:0:没有识别到车道线\\n\n * 1:只识别出右车道线\\n\n * 2:只识别出左车道线\\n\n * 3:只识别出双车道线\\n\n * 返回左右车道线点集(相对相机坐标,单位:mm)\n */\nvoid getLaneInfos(int *lane_quality, std::vector<point> &leftPoints, std::vector<point> &rightPoints);\n"
},
{
"alpha_fraction": 0.5361445546150208,
"alphanum_fraction": 0.5469879508018494,
"avg_line_length": 30.923076629638672,
"blob_id": "41afd6f6af8de2a21a63bccd97a79d0e08ccfcc7",
"content_id": "71937bd3392518ee4c9fa0d02638a8279cd4544e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 850,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 26,
"path": "/athena/core/arm/Control/include/common/map_matching/heading.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#pragma once\n\n#include <math.h>\n#include <vector>\n#include \"spline.h\"\n#include \"../../controller_config.h\"\n\ndouble calculate_R(double steering,\n double Ux,const athena::control::ControllerConfig *controller_config);\n\n// 求取相关点的头指向。\n int get_heading(double x1, double y1,\n double x2, double y2, double& h2);\n\n int get_steering_angle_h(double h1, double h2,\n double s, double& st, double& st_r);\n\n int get_steering_angle(double h1, double h2,\n double s, double& st);\n\n double cau_heading_angle_from_ks(tk::spline s_x,tk::spline s_y,\n double cs,\n double every);\n\ndouble cau_heading_angle_from_ks(tk::spline s_x, tk::spline s_y,\n double cs);\n"
},
{
"alpha_fraction": 0.5641025900840759,
"alphanum_fraction": 0.576168954372406,
"avg_line_length": 25.520000457763672,
"blob_id": "0770f45287c7eee56007f51d300dfb7ddb934f66",
"content_id": "45911ded24227ca21a53c82cb043cf15bb3837f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2115,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 75,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/oam/log/nad_ui_log.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_ui_log.cpp\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:向OCT和VUI发日志\n-------------------------------------------------------*/\n\n//头文件\n#include \"nad_ui_log.h\"\n#include \"nad_type.h\"\n\n#if defined(_NAD_CSU_) || defined(_NAD_RSU_) || defined(_NAD_OBU_)\n\n//基本参数的赋值\n#define LOG_SET_PARA(msg) \\\n msg.log_proc = PROC_TYPE; \\\n msg.log_time = current_datetime_str(); \\\n msg.log_level = log_level; \\\n msg.log = log;\n\n#endif\n\n#ifdef _NAD_CSU_\n\n//向oct发日志,封装了cu_log_report\nvoid log_report_to_oct(string oct_name, int32_t log_level, string log)\n{\n ne_msg_t<nad_lcm::cu_log_report> msg(oct_name, \"cu_log_report\");\n LOG_SET_PARA(msg.body);\n //g_lcm->publish_nemsg(msg);\n g_csu_zmq->send_nemsg<cu_log_report>(msg);\n LOG_SEND(log_cu_log_report(&msg));\n LOG(INFO) << \"log_report_to_oct(\" << oct_name << \"): \" << log;\n}\n\n#endif\n\n#ifdef _NAD_RSU_\n\n//向oct发日志,封装了rc_log_report\nvoid log_report_to_oct(int32_t log_level, string log)\n{\n ne_msg_t<nad_lcm::rc_log_report> msg(g_config.csu.name, \"rc_log_report\");\n LOG_SET_PARA(msg.body);\n //g_lcm->publish_nemsg(msg);\n g_rsu_zmq->send_nemsg<rc_log_report>(msg);\n LOG_SEND(log_rc_log_report(&msg));\n LOG(INFO) << \"log_report_to_oct: \" << log;\n}\n\n//向vui发日志,封装了ro_log_report\nvoid log_report_to_vui(string obu_name, int32_t log_level, string log)\n{\n ne_msg_t<nad_lcm::ro_log_report> msg(obu_name, \"ro_log_report\");\n LOG_SET_PARA(msg.body);\n //g_lcm->publish_nemsg(msg);\n g_rsu_zmq->send_nemsg<ro_log_report>(msg);\n LOG_SEND(log_ro_log_report(&msg));\n LOG(INFO) << \"log_report_to_vui: \" << log;\n}\n#endif\n\n#ifdef _NAD_OBU_\n\n//向vui发日志,封装了ou_log_report\nvoid log_report_to_vui(int32_t log_level, string log)\n{\n nad_lcm::ou_log_report msg;\n LOG_SET_PARA(msg);\n g_lcm->publish(\"ou_log_report\", &msg);\n LOG_SEND(log_ou_log_report(&msg));\n LOG(INFO) << \"log_report_to_vui: \" << log;\n}\n\n#endif\n"
},
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.5434027910232544,
"avg_line_length": 22.510204315185547,
"blob_id": "e0f3db318705e3d31437a5f2a7f7a3cd0e1db87b",
"content_id": "16b3178a00736d59e3c05758c81369ba868631ea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1264,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 49,
"path": "/athena/core/x86/Control/include/controller_alarm_code.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file controller_output_alarm.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROLLER_ALARM_CODE_H_\n#define CONTROLLER_ALARM_CODE_H_\n\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n\n/**\n * @class ControllerOutputAlarm\n *\n * @brief 控制警报输出.\n */\nenum ControllerAlarmCode:int32_t\n{\n OK = 0, /**< OK*/\n CONTROLLER_INIT_ERROR = 1000,/**< 初始化失败*/\n CONTROLLER_OFF_TRACK_ERROR = 1000,/**< 偏离轨迹*/\n CONTROLLER_TRAJECTORY_IS_SHORT_OR_LONG_ERROR = 1001,/**< 剩余轨迹少于10米*/\n CONTROLLER_TRAJECTORY_ERROR = 1002,/**< 轨迹错误*/\n CONTROLLER_TRAJECTORY_SET_TIMEOUT = 1003,/**< 轨迹设置超时*/\n CONTROLLER_COMPUTE_TIMEOUT = 1004,/**< 控制器计算超时*/\n CONTROLLER_COMPUTE_ERROR = 1005,/**< 控制器计算错误*/\n};\n\nenum ControllerAlarmCodeLevel:int32_t\n{\n NORMAL = 0,\n LOW_WARNING = 1,/**< 轻度警告*/\n HIGH_WARNING = 2,/**< 重度警告*/\n ERROR = 3,/**< 错误*/\n};\n}\n}\n\n#endif // CONTROLLER_OUTPUT_H_\n"
},
{
"alpha_fraction": 0.6027202010154724,
"alphanum_fraction": 0.6154145002365112,
"avg_line_length": 27.07272720336914,
"blob_id": "0051da361e3bf72f53ab6ea282c8c65247bca646",
"content_id": "09924128e57e93bf294f415a6f8648fdde4216e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9172,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 275,
"path": "/athena/core/x86/Common/include/base/nad_function.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_function.h\n * 时 间:2016-03-02\n * 描 述:公共基础函数\n-------------------------------------------------------*/\n#ifndef _NAD_FUNCTION_H\n#define _NAD_FUNCTION_H\n\n\n//公共数据类型\n#include \"nad_type.h\"\n\n//公共返回码\n#include \"nad_retcode.h\"\n\n//公共枚举值\n#include \"nad_enum.h\"\n\n#include \"LocalGeographicCS.hpp\"\n#include \"config/Config.h\"\n#include \"math_util.h\"\n\n/*-------------------------------------------------------\n * 公共基础函数测试桩\n-------------------------------------------------------*/\n\nvoid test_function();\n\n\n/*-------------------------------------------------------\n * 公共基础函数\n-------------------------------------------------------*/\n\n//获取当前时间(毫秒)\nint64_t get_current_time();\n\n//获取当前时间(形如: 12:34:56)\nstring current_time_str();\n\n//获取当前日期时间(形如: 2016-08-12 12:34:56)\nstring current_datetime_str();\n\n//二进制字符串转十进制整数,用于block_value的转化\nint32_t get_block_value(string block_value);\n\n//十进制整数转二进制字符串,用于block_value的转化\nstring get_block_value(int32_t block_value, int lane_count);\n\n//利用lon和lat来拼接出一个字符串\"lon-lat-source\",用于给session当主键,默认source是\"\"\nstring build_session_key(double lon, double lat, string source = \"\");\n\n//从id中获得经度和纬度,兼容\"lon|lat\"和\"lon|lat|source\"两种情况\nvoid get_lon_lat(double &lon, double &lat, string id);\n\n//从id中获得经度、纬度和lane_id,没有lane_id填0\nvoid get_lon_lat_id(double &lon, double &lat, int32_t &lane_id, string id);\n\n\n/*-------------------------------------------------------\n * 字符串函数\n-------------------------------------------------------*/\n\n//求sub1,sub2在str中所夹的字符串,例如between(\"1#23%4\", \"#\", \"%\")返回\"23\"\n//sub1或sub2找不到返回“”。sub1为null则从头截取;sub2位null则截取到末尾\nstring sbetween(const char *str, const char *sub1, const char *sub2);\nstring sbetween(const string &str, const char *sub1, const char *sub2);\n\n//求字符串前几个字符或后几个字符是不是sub\nbool scontain(const char *str, const char *sub);\nbool scontain(const string &str, const char *sub);\nbool srcontain(const 
char *str, const char *sub);\nbool srcontain(const string &str, const char *sub);\n\n//剪切字符串\nstring &trim(string &str); //修改了字符串本身\nstring strim(const char *str); //新建string保存结果\nstring strim(const string &str);\n\n//截取字符串的左len长度或右len长度\nstring sleft(const char *str, const int len);\nstring sleft(const string &str, const int len);\nstring sright(const char *str, const int len);\nstring sright(const string &str, const int len);\n\n//截取字符串的左(strlen-len)长度或右(strlen-len)长度\nstring sleftbut(const char *str, const int len);\nstring sleftbut(const string &str, const int len);\nstring srightbut(const char *str, const int len);\nstring srightbut(const string &str, const int len);\n\n//查找替换\nstring sreplace(const char *str, const char *from_str, const char *to_str);\nstring sreplace(const string &str, const char *from_str, const char *to_str);\n\n//是不是变量名\nbool is_word(const char *str);\nbool is_word(const string &str);\n\n//是不是整数\nbool is_int(const char *str);\nbool is_int(const string &str);\n\n//是不是浮点数\nbool is_float(const char *str);\nbool is_float(const string &str);\n\n//大小写转化\nstring stoupper(const char *str);\nstring stoupper(const string &str);\nstring stolower(const char *str);\nstring stolower(const string &str);\n\n//获得配置项(如\"delay=300\")的左侧(trim&&tolower)和右侧(trim)\nstring get_left(const char *str);\nstring get_left(const string &str);\nstring get_right(const char *str);\nstring get_right(const string &str);\n\n//求子字符串sub在str中的出现次数,如\"1,2,3\"中\",\"出现了2次\nint sub_count(const char *str, const char *sub);\nint sub_count(const string &str, const char *sub);\n\n\n/*-------------------------------------------------------\n * 字符串数组类\n-------------------------------------------------------*/\n\nclass stringlist : public vector<string>\n{\npublic:\n //继承vector<string>的常用函数\n //assign(beg,end),at(idx),back(),begin(),capacity(),clear(),empty(),end()\n //erase(pos),erase(beg,end),front(),insert(pos,elem),insert(pos,n,elem)\n 
//insert(pos,beg,end),max_size(),pop_back(),push_back(elem),rbegin()\n //rend(),resize(num),reserve(),size(),swap(c1,c2)\n\n //从文件中加载,成功返回RET_OK\n int load_from_file(const char *filename);\n int load_from_file(const string &filename);\n\n //从文件中加载,成功返回RET_OK\n int save_to_file(const char *filename);\n int save_to_file(const string &filename);\n\n //从间隔字符串中加载,如str=\"a|b|c\", part=\"|\",则插入三个字符串\"a\"、\"b\"、\"c\"\n void load_from_part(const char *str, const char *part);\n void load_from_part(const string &str, const char *part);\n\n //保存为间隔字符串,承上例,当part=\"|\"时,返回\"a|b|c\"\n string save_to_part(const char *part);\n\n //从类似C语言的语句中加载,如cmd=\"const trim(str=\"wuhan\") //test\",\n //则插入七个字符串\"const\"、\"trim\"、\"(\"、\"str\"、\"=\"、\"wuhan\"、\")\"\n //字符串已经去掉了双引号,//注释不会加入进来\n int load_from_cmd(const char *cmd);\n int load_from_cmd(const string &cmd);\n\n //所有的字符串做trim,del_blank=删除空字符串,del_comment=删除//注释\n void trim(bool del_blank = false, bool del_comment = false);\n\n //查找字符串,返回下表,找不到返回-1\n int find(const char *str);\n int find(const string &str);\n\n //调试打印\n void show();\n\n //测试代码\n void test();\n};\n\n/**\n* @brief coord_transfer,完成经纬度和高斯投影面xy之间坐标的转换。\n* 包含原点的设定\n*/\nclass coord_transfer\n{\npublic:\n LocalGeographicCS cs;\n\n coord_transfer();\n coord_transfer(std::string filename);\n coord_transfer(double lat, double lon);\n ~coord_transfer();\n\n void set_origin(std::string filename);\n void set_origin(double lat, double lon);\nprivate:\n Config configSettings;\n double origin_lat = 0;///<投影原点纬度.\n double origin_lon = 0;///<投影原点经度.\n};\n\n/*-------------------------------------------------------\n * 文件函数\n-------------------------------------------------------*/\n\n//不同系统的路径间隔符\n#ifdef _WIN32\n #define phc '\\\\'\n #define phs \"\\\\\"\n#else\n #define phc '/'\n #define phs \"/\"\n#endif\n\n//补充完整路径,如\"/opt/huawei\"修改成\"/opt/huawei/\"\nstring full_path(const char *path);\nstring full_path(const string 
&path);\n\n//获得路径名,如\"/opt/huawei/demo.txt\"返回\"/opt/huawei\"\nstring get_path(const char *path);\nstring get_path(const string &path);\n\n//获得文件名,如\"/opt/huawei/demo.txt\"返回\"demo.txt\"\nstring get_filename(const char *path);\nstring get_filename(const string &path);\n\n//获得基础名,如\"/opt/huawei/demo.txt\"返回\"demo\"\nstring get_basename(const char *path);\nstring get_basename(const string &path);\n\n//获得扩展名,如\"/opt/huawei/demo.txt\"返回\"txt\"\nstring get_extname(const char *path);\nstring get_extname(const string &path);\n\n//获得一个目录下的所有文件,path形如“d:\\\\z\\\\*.txt”,中文path注意字符集问题\nvoid get_file_list(stringlist &result, const char *path);\nvoid get_file_list(stringlist &result, const string &path);\n\n\n/*-------------------------------------------------------\n * 坐标转换函数\n-------------------------------------------------------*/\nbool outOfChina(double lat, double lon) ;\ndouble transformLat(double x, double y);\ndouble transformLon(double x, double y);\n/**\n * 地球坐标转换为火星坐标\n * World Geodetic System ==> Mars Geodetic System\n *\n * @param wgLat 地球坐标\n * @param wgLon\n *\n * mglat,mglon 火星坐标\n */\nvoid transform2Mars(double wgLat, double wgLon,double &mgLat,double &mgLon);\n/**\n * 火星坐标转换为地球坐标\n * Mars Geodetic System ==>World Geodetic System\n *\n * @param mglat 火星坐标\n * @param mglon\n *\n * wgLat wgLon地球坐标\n */\nvoid transform2Wgs(double mgLat, double mgLon, double &wgLat, double &wgLon);\nvoid string2Wgs(string &id);\n\n//从\"点1->点2\"的方向向右旋转90度构成\"点1->点3\",且\"点1-点3\"间距meter米\nvoid turn_right90(double x1, double y1, double x2, double y2, double offset, double &x3, double &y3);\n\n/*-------------------------------------------------------\n *计算两点建的距离\n-------------------------------------------------------*/\n//判断两个对象距离在dist_meter米以内\nbool dist_in_ll(double lat1, double lon1, double lat2, double lon2, double dist_meter);\n\n//判断两个对象距离在dist_meter米以内\nbool dist_in_xy(double x1, double y1, double x2, double y2, double dist_meter);\n\n//两点间的距离\ndouble dist_two_points_ll(double 
lat1, double lon1, double lat2, double lon2);\n\n#endif /*_NAD_FUNCTION_H*/\n"
},
{
"alpha_fraction": 0.5421994924545288,
"alphanum_fraction": 0.571611225605011,
"avg_line_length": 24.177419662475586,
"blob_id": "e885cab3dc15ac214098b6dc2e4569778e07e2c9",
"content_id": "bcdd5ea7ef65abbf339fc1a9fecd44aab5eb5306",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1718,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 62,
"path": "/athena/cc/planning/intelligent_park/main.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief a planning demo of lcm.\n */\n\n#include <iostream>\n\n//#include \"planning/planning.h\"\n#include \"common/car_state.h\"\n#include \"common/enum_list.h\"\n#include \"park/park.h\"\n\n#define NUM_EXTEND_TRAJECTORY 600 ///<给控制发轨迹时,需要延长一些(点的个数)\n///<档位参数\n#define AT_STATUS_P 0 ///<档位信息,P档\n#define AT_STATUS_R 1 ///<档位信息,R档\n#define AT_STATUS_N 2 ///<档位信息,N档\n#define AT_STATUS_D 3 ///<档位信息,D档\n#define AT_STATUS_M 4 ///<档位信息,M档\n\n\nusing namespace std;\n\n///park test 主程序\nint main(int argc, char *argv[])\n{\n cout << \" Intelligent Parking start ! 2018.07.28: 15:25. @ by liming in Guanggu\" << endl;\n\n IntelligentParking ip;\n path park_trajectory_d, park_trajectory_r;\n\n ///停车给信息\n StopPark sp;\n sp.x_ = 20.0;\n sp.y_ = 10.0;\n sp.heading_ = 60.0;\n sp.length_ = 8.5;\n sp.width_ = 2.3;\n sp.type_ = STOP_TYPE_PARALLEL_PARK;\n\n ip.set_park_point( sp );\n\n ///车停止位置\n CarState car_state;\n car_state.car_pose_.CurrentX_ = -4.0;\n car_state.car_pose_.CurrentY_ = 10.0;\n car_state.car_pose_.Current_heading_ = 90.0;\n\n ///生成轨迹\n ip.compute_parking_trajectory( car_state );\n\n ///D档部分\n ip.get_trajectory_d( park_trajectory_d );\n ip.extend_trajectory( park_trajectory_d, NUM_EXTEND_TRAJECTORY, 0.1, AT_STATUS_D );\n\n ///R档部分\n ip.get_trajectory_r( park_trajectory_r );\n ip.extend_trajectory( park_trajectory_r, NUM_EXTEND_TRAJECTORY, 0.1, AT_STATUS_R );\n\n return 0;\n}\n\n\n\n"
},
{
"alpha_fraction": 0.5278171300888062,
"alphanum_fraction": 0.5879020690917969,
"avg_line_length": 21.265766143798828,
"blob_id": "8035fbff70941b2d69abc6c4b0168b103bcd2054",
"content_id": "81718b6dbf4b7bf5caf7459fac93685a035161ca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5419,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 222,
"path": "/athena/core/arm/Common/include/base/nad_type.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_type.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:公共头文件和数据类型定义\n-------------------------------------------------------*/\n#ifndef _NAD_TYPE_H\n#define _NAD_TYPE_H\n\n\n/*-------------------------------------------------------\n * Linux操作系统的头文件\n-------------------------------------------------------*/\n\n//linux的常用头文件\n#include <arpa/inet.h>\n#include <assert.h>\n#include <ctype.h>\n#include <dirent.h>\n#include <dlfcn.h>\n#include <errno.h>\n#include <fcntl.h>\n#include <glob.h>\n#include <math.h>\n#include <netdb.h>\n#include <netinet/in.h>\n#include <netinet/tcp.h>\n#include <pthread.h>\n#include <stdarg.h>\n#include <stdarg.h>\n#include <stddef.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n#include <strings.h>\n#include <sys/epoll.h>\n#include <sys/ioctl.h>\n#include <sys/ipc.h>\n#include <sys/mman.h>\n#include <sys/mount.h>\n#include <sys/resource.h>\n#include <sys/sem.h>\n#include <sys/select.h>\n#include <sys/shm.h>\n#include <signal.h>\n#include <sys/socket.h>\n#include <sys/stat.h>\n#include <sys/statfs.h>\n#include <sys/syscall.h>\n#include <sys/time.h>\n#include <sys/types.h>\n#include <sys/un.h>\n#include <time.h>\n#include <unistd.h>\n#include <net/if.h>\n#include <iostream>\n\n//支持stl\n#include <algorithm>\n#include <deque>\n#include <functional>\n#include <iterator>\n#include <vector>\n#include <string>\n#include <list>\n#include <map>\n#include <memory>\n#include <numeric>\n#include <queue>\n#include <set>\n#include <stack>\n#include <utility>\n#include <fstream>\n#include <regex>\nusing namespace std;\n\n//支持boost\n#include <boost/filesystem.hpp>\n#include <boost/tokenizer.hpp>\n#include <boost/lexical_cast.hpp>\n#include <boost/algorithm/string.hpp>\n\n//使用glog应该包含的头文件\n#include <glog/logging.h>\n#include <glog/raw_logging.h> //支持线程安全\n\n#include \"nad_enum.h\"\n\n//统一定义数据结构\ntypedef unsigned char uchar;\ntypedef unsigned short ushort;\ntypedef 
unsigned int uint;\ntypedef long long int64;\ntypedef unsigned long long uint64;\n\n//定义布尔值\ntypedef int boolean;\n#define TRUE 1\n#define FALSE 0\n\n//各种数据类型的最大最小值\n#define MAX_I8 127\n#define MIN_I8 (-128)\n#define MAX_I16 32767\n#define MIN_I16 (-32768)\n#define MAX_I32 2147483647L\n#define MIN_I32 (-MAX_I32 - 1L)\n#define MAX_I64 9223372036854775807LL\n#define MIN_I64 (-MAX_I64 - 1LL)\n#define MAX_U8 255\n#define MIN_U8 0\n#define MAX_U16 65535\n#define MIN_U16 0\n#define MAX_U32 4294967295UL\n#define MIN_U32 0\n#define MAX_U64 18446744073709551615ULL\n#define MIN_U64 0\n\n//常用大小定义\n#define _TB_ (1024LL * 1024 * 1024 * 1024) /* 1TB */\n#define _GB_ (1024 * 1024 * 1024) /* 1GB */\n#define _MB_ (1024 * 1024) /* 1MB */\n#define _KB_ (1024) /* 1KB */\n#define BUF_SIZE_8 8\n#define BUF_SIZE_16 16\n#define BUF_SIZE_20 20\n#define BUF_SIZE_32 32\n#define BUF_SIZE_64 64\n#define BUF_SIZE_128 128\n#define BUF_SIZE_256 256\n#define BUF_SIZE_512 512\n#define BUF_SIZE_1024 1024\n#define BUF_SIZE_4K (4 * _KB_)\n#define BUF_SIZE_8K (8 * _KB_)\n#define BUF_SIZE_16K (16 * _KB_)\n#define BUF_SIZE_32K (32 * _KB_)\n#define BUF_SIZE_64K (64 * _KB_)\n#define BUF_SIZE_1M (1 * _MB_)\n#define BUF_SIZE_2M (2 * _MB_)\n#define BUF_SIZE_4M (4 * _MB_)\n\n//圆周率定义\n#define PI 3.14159265358979323846264338\n\n#ifdef _NAD_CSU_PLANNING_\n #define PROC_TYPE \"csu_planning\"\n#endif\n\n#ifdef _NAD_RSU_PLANNING_\n #define PROC_TYPE \"rsu_planning\"\n#endif\n\n#ifdef _NAD_OBU_PLANNING_\n #define PROC_TYPE \"obu_planning\"\n#endif\n\n#ifdef _NAD_RSD_COLLECTOR_\n #define PROC_TYPE \"rsd_collector\"\n#endif\n\n#ifdef _NAD_SIM_SYSTEM_\n #define PROC_TYPE \"sim_system\"\n#endif\n\n#ifdef _NAD_SIM_VUI_\n #define PROC_TYPE \"sim_vui\"\n#endif\n\n#ifndef PROC_TYPE\n#define PROC_TYPE \"obu_planning\"\n#endif\n\n\n/*-------------------------------------------------------\n * 进程的名字宏\n-------------------------------------------------------*/\n\n//取值如\"csu\"/\"rsu\"/\"obu\"\nextern string 
g_ne_type;\n\n\n/*-------------------------------------------------------\n * 系统关键文件位置\n-------------------------------------------------------*/\n\n//基础目录,如\"/home/zhangyi/code_huashan_phase2_160909v1.1\"\nextern string NAD_PATH;\n\n//当前车号或设备,\"\"表示读config.xml的local_obu等,obu仅取尾数部分,如\"鄂A V94M0\"则填\"V94M0\"\n//目前仅用于obu_planning/obu_planning/rsd_collector\nextern string NAD_LOCAL_NE;\n\n//配置文件\n#define NAD_CONFIG_FILE \"/conf/config.xml\"\n\n//地图原点\n#define NAD_MAP_SET_POINT \"/conf/origin.route\"\n\n//数据库存盘文件(动态数据)\n#define NAD_DB_FILE \"/conf/db/db.xml\"\n\n//数据库存盘文件(静态数据)\n#define NAD_DB_TYPE_FILE \"/conf/db/db_type.xml\"\n\n//地图文件\n#define NAD_MAP_FILE \"/map/nad.osm\"\n\n//文件目录\n#define NAD_TASK_DIR \"/conf/task/\"\n\n//日志文件目录\n#define NAD_LOG_DIR \"/log/\"\n\n/*-------------------------------------------------------\n * 系统关键参数\n-------------------------------------------------------*/\n\n//最小定时器间隔,1毫秒,太小的话影响lcm收消息,太大的话无法实现密集触发的定时器\n#define MIN_TIMER_MS 1\n\n\n#endif /*_NAD_TYPE_H*/\n"
},
{
"alpha_fraction": 0.6740331649780273,
"alphanum_fraction": 0.6740331649780273,
"avg_line_length": 29.16666603088379,
"blob_id": "ae303613138f326cff525d19da569871c7995d75",
"content_id": "eac08a5394ea9677aa2ab7881a1afb2ff1434084",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 181,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 6,
"path": "/athena/core/x86/Camera/vision_ssd_detect/util/BoundingBox.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"BoundingBox.h\"\n\n// Constructors\n\n//BoundingBox::BoundingBox(double cx, double cy, double width, double height)\n // : cx(cx), cy(cy), width(width), height(height) {}\n"
},
{
"alpha_fraction": 0.6610686779022217,
"alphanum_fraction": 0.6641221642494202,
"avg_line_length": 14.595237731933594,
"blob_id": "a73066ea8c8ebbcd56507e52cd449a68c4237180",
"content_id": "e579029d75b5f8c16824dd6786257366de57152e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 679,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 42,
"path": "/athena/examples/ROS/src/DataRecording/rtk_inertial/src/Commons/transfer.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"LocalGeographicCS.hpp\"\n\nclass coord_transfer\n{\npublic:\n LocalGeographicCS cs;\n coord_transfer();\n coord_transfer(double lat, double lon);\n ~coord_transfer();\n\n void set_origin(double lat, double lon);\nprivate:\n double origin_lat;///<投影原点纬度.\n double origin_lon;///<投影原点经度.\n\n\n};\n\ncoord_transfer::coord_transfer()\n\t:origin_lat(0)\n\t,origin_lon(0)\n{\n\n}\n\n\ncoord_transfer::coord_transfer(double lat, double lon)\n{\n set_origin(lat, lon);\n}\n\n\n\nvoid coord_transfer::set_origin(double lat, double lon)\n{\n origin_lat = lat;\n origin_lon = lon;\n cs.set_origin(origin_lat, origin_lon);\n}\ncoord_transfer::~coord_transfer()\n{\n}\n"
},
{
"alpha_fraction": 0.521510124206543,
"alphanum_fraction": 0.5504828691482544,
"avg_line_length": 19.709091186523438,
"blob_id": "c0de3c6be49a4ecd3ba3cfa32d699cabf9587117",
"content_id": "ac538a7ffaf1331ad5700f963fe0da0920a6946d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1221,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 55,
"path": "/athena/core/x86/Control/include/local_localization.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file local_localization.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef LOCAL_LOCALIZATION_H_\n#define LOCAL_LOCALIZATION_H_\n\n\nclass LocalLocalization{\npublic:\n LocalLocalization()\n {\n current_x_ = 0.0;\n current_y_ = 0.0;\n current_z_ = 0.0;\n current_yaw_ = 0.0;\n current_pitch_ = 0.0;\n current_roll_ = 0.0;\n current_heading_speed_ = 0.0;\n current_roll_speed_ = 0.0;\n current_pitch_speed_ = 0.0;\n current_speed_ = 0.0;\n current_acceleration_ = 0.0;\n }\n ~LocalLocalization() = default;\n ///地图坐标 x\n double current_x_;\n ///地图坐标 x\n double current_y_;\n ///地图坐标 x\n double current_z_;\n ///航向角\n double current_yaw_;\n ///俯仰角\n double current_pitch_;\n ///横滚角\n double current_roll_;\n ///航向角速度\n double current_heading_speed_;\n ///横滚角速度\n double current_roll_speed_;\n ///俯仰角速度\n double current_pitch_speed_;\n ///速度\n double current_speed_;\n ///加速度\n double current_acceleration_;\n};\n\n#endif // LOCAL_LOCALIZATION_H_\n"
},
{
"alpha_fraction": 0.604651153087616,
"alphanum_fraction": 0.6259689927101135,
"avg_line_length": 23.571428298950195,
"blob_id": "0881bc0a6ab25abe734e08f6e2c9d88b23bfe08d",
"content_id": "236df3a6b0d15147dc3bf40712318c7895d980ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 516,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 21,
"path": "/athena/examples/LCM/Singlecar/control/apps/main.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <GL/glu.h>\n#include <GL/glut.h>\n#include <glog/logging.h>\n#include \"control.h\"\n//#include \"control_debug/control_debug.h\"\n\nint main(int argc, char **argv) {\n std::string ver = \"control start--20181026V1\";\n glutInit(&argc, argv);\n system(\"mkdir -p log\");\n google::InitGoogleLogging(argv[0]);\n\n Control *control = new Control(ver);\n control -> Init();\n control -> start();\n // ControlDebug::Init(&control);\n glutMainLoop();\n glutMainLoop();\n control -> join();\n return 0;\n}\n"
},
{
"alpha_fraction": 0.5515773296356201,
"alphanum_fraction": 0.5798848271369934,
"avg_line_length": 29.820701599121094,
"blob_id": "ed8e81cdb717f153eee3cd3af7257cc1832f7423",
"content_id": "ee582ebc800dec8198adbb853f2c91121325596e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 20938,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 541,
"path": "/athena/core/x86/Planning/include/common/enum_list.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_enum.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:公共枚举值\n-------------------------------------------------------*/\n#ifndef _NAD_ENUM_H\n#define _NAD_ENUM_H\n\n#define PI 3.14159265358979323846264338\n\n#define to_radians(x) ( (x) * (PI / 180.0 )) ///<角度转弧度\n#define to_degrees(x) ( (x) * (180.0 / M_PI )) ///<弧度转角度\n\n#define MAXIMUM_VALUE 9e9 ///<极大值\n#define MINIMUM_VALUE -9e9 ///<极小值\n\n//replan flag\n#define REPLAN_FLAG_NONE 0 ///<不需要重规划\n#define REPLAN_FLAG_NAVI 1 ///<云端需要重规划\n#define REPLAN_FLAG_PLANNING 2 ///<planning自身需要重规划\n\n//cross_road_status_\n#define NOT_AT_CROSSING 0 ///<非路口\n#define AT_CROSSING_GREEN 1 ///<在路口,绿灯\n#define AT_CROSSING_RED 2 ///<在路口,红灯(黄灯当做红灯处理)\n\n//light status\n#define LIGHT_STATUS_NONE 0 ///<无效值\n#define LIGHT_STATUS_GREEN 1 ///<绿灯\n#define LIGHT_STATUS_YELLOW 2 ///<黄灯\n#define LIGHT_STATUS_RED 3 ///<红灯\n\n//obstacle type\n#define OBSTACLE_TYPE_UNKNOW 0 ///<未知障碍物\n#define OBSTACLE_TYPE_CAR 1 ///<车辆\n#define OBSTACLE_TYPE_PEDESTRIAN 2 ///<行人\n\n//filter_obstacle_flag\n#define FILTER_OBSTACLE_NOT 0 ///<不过滤\n#define FILTER_OBSTACLE_POINT 1 ///<按障碍物中心点过滤\n#define FILTER_OBSTACLE_RECT 2 ///<按障碍物边框过滤\n\n//Stop_Park\n#define STOP_TYPE_TERMINAL 10 ///<终点信息\n#define STOP_TYPE_PARALLEL_PARK 1 ///<平行泊车信息\n#define STOP_TYPE_VERTICAL_PARK 2 ///<垂直泊车信息\n#define STOP_TYPE_OBLIQUE_PARK 3 ///<斜向泊车信息\n\n#define DRIVE_MODE_OVERTAKE 1 ///超车模式\n#define DRIVE_MODE_FOLLOW 2 ///跟车模式\n\n\n//----------------------------------------------------------\n// 网元定义枚举值\n//----------------------------------------------------------\n\n//网元类型\n#define E_NE_CSU 0\n#define E_NE_RSU 1\n#define E_NE_OBU 2\n\n//子系统类型\n#define E_ME_CSU 0\n#define E_ME_OCT 1\n#define E_ME_RSU 10\n#define E_ME_ETS 11\n#define E_ME_RSD 12\n#define E_ME_OBU 20\n#define E_ME_VUI 21\n\n//心跳超时时长\n#define HTBT_TIMEOUT 10000 //暂时改成很大,避免调试时很容易心跳注销\n\n\n//----------------------------------------------------------\n// 
obu上的obu的状态\n//----------------------------------------------------------\n\n//obu管理自身的状态机\n#define E_OBU_SESSION_OBU_DISCONN_HAND 0 //无网络人工驾驶\n#define E_OBU_SESSION_OBU_CONNECT_HAND 1 //有网络人工驾驶\n#define E_OBU_SESSION_OBU_DISCONN_AUTO 2 //无网络自动驾驶\n#define E_OBU_SESSION_OBU_CONNECT_AUTO 3 //有网络自动驾驶\n\n//obu管理自身注册状态的状态机\n#define E_OBU_SESSION_OBU_LOGIN_CSU 0 //正在向csu要个rsu_name\n#define E_OBU_SESSION_OBU_LOGIN_RSU 1 //正在注册到rsu\n#define E_OBU_SESSION_OBU_LOGIN_OK 2 //注册成功\n\n//obu请求启动自动驾驶的状态机\n#define E_OBU_SESSION_OBU_START_AUTO_WAIT 0 //已发送or_start_auto_request消息,等待应答\n#define E_OBU_SESSION_OBU_START_AUTO_OK 1 //收到ro_start_auto_respond应答,启动成功\n\n/**\n * class obu_session_obu\n * {\n * int session_status; //主状态,枚举值为E_OBU_SESSION_OBU_XXX\n * int login_status; //登录状态,枚举值为E_OBU_SESSION_OBU_LOGIN_XXX\n * nad_planning planning; //路径规划,planning.list.size()==0表示无规划\n * int htbt_count; //心跳计数,每次发送or_info_report则htbt_count++;\n * //每次收到ro_info_report则htbt_count=0,htbt_count>HTBT_TIMEOUT则超时\n * } obu;\n *\n * //每秒处理obu状态\n * if (obu.session_status == DISCONN_HAND || obu.session_status == DISCONN_AUTO)\n * {\n * 根据obu.login_status发消息注册\n * }\n * else if (obu.htbt_count > HTBT_TIMEOUT) //心跳超时\n * {\n * obu.session_status = DISCONN_AUTO; //根据降级原则,首先降级为DISCONN_AUTO\n * obu.login_status = LOGIN_CSU; //开始不断的注册\n * }\n *\n * //收到了ro_obu_logout_notify(rsu或csu心跳超时注销了obu),重新注册\n * {\n * obu.session_status = DISCONN_AUTO; //根据降级原则,首先降级为DISCONN_AUTO\n * obu.login_status = LOGIN_CSU; //开始不断的注册\n * }\n**/\n\n\n//----------------------------------------------------------\n// rsu上的obu的状态\n//----------------------------------------------------------\n\n//rsu上的obu状态\n#define E_RSU_SESSION_OBU_DISCONN 0 //无网络\n#define E_RSU_SESSION_OBU_CONNECT_HAND 1 //有网络人工驾驶\n#define E_RSU_SESSION_OBU_CONNECT_AUTO 3 //有网络自动驾驶\n\n/**\n * class rsu_session_obu\n * {\n * int session_status; //主状态,枚举值为E_RSU_SESSION_OBU_XXX\n * nad_planning planning; //路径规划,planning.list.size()==0表示无规划\n * int htbt_count; 
//心跳计数,每次发送rc_info_report则htbt_count++;\n * //每次收到cr_info_report则htbt_count=0,htbt_count>HTBT_TIMEOUT则超时\n * } obu;\n *\n * //每秒处理obu状态\n * if (obu.session_status != DISCONN && obu.htbt_count > HTBT_TIMEOUT) //心跳超时\n * {\n * 发消息rc_obu_logout_notify、ro_obu_logout_notify\n * 删除rsu_session_obu\n * }\n**/\n\n\n//----------------------------------------------------------\n// csu上的obu的状态\n//----------------------------------------------------------\n\n//csu上的obu状态\n#define E_CSU_SESSION_OBU_DISCONN 0 //无网络\n#define E_CSU_SESSION_OBU_CONNECT_HAND 1 //有网络人工驾驶\n#define E_CSU_SESSION_OBU_CONNECT_AUTO 3 //有网络自动驾驶\n\n/**\n * class csu_session_obu\n * {\n * int session_status; //主状态,枚举值为E_RSU_SESSION_OBU_XXX\n * nad_planning planning; //路径规划,planning.list.size()==0表示无规划\n * int htbt_count; //心跳计数,每次发送rc_info_report则htbt_count++;\n * //每次收到cr_info_report则htbt_count=0,htbt_count>HTBT_TIMEOUT则超时\n * } obu;\n *\n * //每秒处理obu状态\n * if (obu.session_status != DISCONN && obu.htbt_count > HTBT_TIMEOUT) //心跳超时\n * {\n * 发消息cr_obu_logout_notify\n * 删除csu_session_obu\n * }\n**/\n\n\n//----------------------------------------------------------\n// rsu的状态\n//----------------------------------------------------------\n\n//rsu管理自身的状态机\n#define E_RSU_SESSION_RSU_DISCONN 0 //未连接\n#define E_RSU_SESSION_RSU_CONNECT 1 //已连接\n\n//csu上的rsu状态\n#define E_CSU_SESSION_RSU_DISCONN 0 //未连接\n#define E_CSU_SESSION_RSU_CONNECT 1 //已连接\n\n\n//----------------------------------------------------------\n// ets的枚举值\n//----------------------------------------------------------\n\n/**\n * rsu如果收到ets的er_ets_report消息,rsu就会创建对应的rsu_session_light/rsu_session_limspeed/rsu_session_block\n * 并在每秒上报中通过rc_info_report、ro_info_report通知到csu、obu,其中csu同理创建session。\n *\n * rsu收到er_ets_report消息时会把rsu_session_light/rsu_session_limspeed/rsu_session_block的htbt_count=0,\n * rsu的一秒定时器会把rsu_session_light/rsu_session_limspeed/rsu_session_block的htbt_count++,\n * 如果htbt_count > 
HTBT_TIMEOUT,就会删除rsu_session_light/rsu_session_limspeed/rsu_session_block。\n *\n * csu收到rc_info_report会做同样的心跳处理,如果某ets.htbt_count > HTBT_TIMEOUT,删除对应session\n**/\n\n//交通元素类型枚举值\n\n#define TET_NONE -1 //未知\n#define TET_LANE 0 //车道\n#define TET_LIGHT 10 //红绿灯\n#define TET_LIMSPEED 11 //限速牌\n#define TET_BLOCK 12 //施工标志\n//#define TET_CROSSING 20 //路口\n#define KP_LIGHT_ACTIVE 20 //红绿灯激活点\n#define KP_LIGHT_STOP 21 //红绿灯停止点(vui)\n#define KP_LIGHT_EXIT 22 //红绿灯退出点\nconst char *tet_str(const int val);\n\n//车道方向枚举值\n#define LD_NONE 0 //远离路口或离开路口,行驶在哪条车道都可以,无方向属性\n#define LD_STRAIGHT 1 //直行\n#define LD_LEFT 2 //左转\n#define LD_RIGHT 4 //右转\n#define LD_TURN 8 //掉头\n#define LD_S_L (LD_STRAIGHT | LD_LEFT) //直行+左转 01 | 10 = 11 = 3\n#define LD_S_R (LD_STRAIGHT | LD_RIGHT) //直行+右转 001 | 100 = 101 = 5\n#define LD_S_L_R (LD_STRAIGHT | LD_LEFT | LD_RIGHT) //直行+左转+右转 001 | 010 | 100 = 111 = 7\n#define LD_L_T (LD_LEFT | LD_TURN) //左转+掉头 0010 | 1000 = 1010 = 10\n\n//红绿灯取值\n#define LS_NONE 0 //离线\n#define LS_GREEN 1 //绿灯\n#define LS_YELLOW 2 //黄灯\n#define LS_RED 3 //红灯\n//由障碍物或者施工标志引起的路权变化\n#define LS_RED_OBSTACLE 4 //\n#define LS_RED_BLOCK 5 //\nconst char *ls_str(const int val);\n\n\n//----------------------------------------------------------\n// rsd的枚举值\n//----------------------------------------------------------\n\n//障碍物类型\n#define OT_OBU 0 //安装了OBU的联网车辆(包括仅支持ADAS的车)\n#define OT_CAR 1 //没安装OBU的社会车辆\n#define OT_PEOPLE 2 //行人\n#define OT_BLOCK 3 //普通实物单车道施工标志\n#define OT_OTHER 4 //其他障碍物\nconst char *ot_str(const int val);\n\n//各种障碍物的距离抗抖动\n#define SAME_DIST_CAR 2.0 //OT_OBU和OT_CAR在2米内认为是同一障碍物\n#define SAME_DIST_PEOPLE 1.0 //OT_PEOPLE在1米内认为是同一障碍物\n#define SAME_DIST_BLOCK 1.0 //OT_BLOCK在1米内认为是同一障碍物\n#define SAME_DIST_OTHER 1.0 //OT_OTHER在1米内认为是同一障碍物\n\n//传感器类型\n#define RSD_SENSOR_LIDAR 0 //激光雷达\n#define RSD_SENSOR_CAMERA 1 //摄影头\n#define RSD_SENSOR_RADAR 2 //毫米波雷达\nconst char *rsd_sensor_str(const int val);\n\n\n//----------------------------------------------------------\n// 
路径规划的枚举值\n//----------------------------------------------------------\n\n//key_point类型\n#define KP_NONE -1 //不是关键点(即:拟合点)\n#define KP_UNKNOWN 0 //未赋予关键点类型(启动和终点允许设置为本类型,我们认为key_point_list的第一个是起点,最后一个是终点)\n#define KP_CHANGE_LANE 1 //换道点(不分左右)\n#define KP_CHANGE_LANE_LEFT 2 //向左换道点\n#define KP_CHANGE_LANE_RIGHT 3 //向右换道点\n#define KP_CHANGE_LANE_OK 4 //换道成功点\n\n\n//----------------------------------------------------------\n// 编队的枚举值\n//----------------------------------------------------------\n\n//修改编队的操作类型\n#define SP_ADD_OBU_TO_TAIL 0 //在队尾追加车辆\n#define SP_DELETE_OBU 1 //删除车辆\n#define SP_SET_SPEED 2 //修改编队整体车速\nconst char *sp_str(const int val);\n\n//删除编队的操作类型\n#define DPR_ARRIVED 0 //编队整体到达目的地\n#define DPR_DELETE_ALL 1 //除头车外所有车辆都被删除\n#define DPR_OCT_FORCE 2 //OCT为了调试强制解散编队\nconst char *dpr_str(const int val);\n\n\n//----------------------------------------------------------\n// obu的枚举值\n//----------------------------------------------------------\n\n//加速度模式\n#define ACC_P_CONSTANT_SPEED 0 //恒速\n#define ACC_P_CONSTANT_ACC 1 //匀加速\n#define ACC_P_CONSTANT_DECE 2 //匀减速\n#define ACC_P_RAPID_ACC 3 //急加速\n#define ACC_P_RAPID_DECE 4 //急减速\n\n//一种类型的传感器最大数量,要和message.lcm保持一致\n#define MAX_SENSOR 16\n\n//传感器状态\n#define SENSOR_NONE 0 //传感器不在位\n#define SENSOR_OK 1 //传感器正常\n#define SENSOR_ERR 2 //传感器故障\nconst char *sensor_str(const int val);\n\n//照相机枚举值\n#define IDX_CAMERA_1 0 //前视车道线识别单目相机\n#define IDX_CAMERA_2 1 //前视障碍物识别双目相机\n\n//毫米波雷达枚举值\n#define IDX_RADAR_ESR 0 //前视远距离雷达(ESR)\n#define IDX_RADAR_SRR_FL 1 //左前方SRR雷达\n#define IDX_RADAR_SRR_FR 2 //右前方SRR雷达\n#define IDX_RADAR_SRR_BL 3 //左后方SRR雷达\n#define IDX_RADAR_SRR_BR 4 //右后方SRR雷达\n\n//激光雷达枚举值\n#define IDX_LIDAR_CENTER 0 //前视远距离雷达\n\n//GPS枚举值\n#define IDX_GPS_CENTER 0 //中置GPS/INS组合导航\n#define IDX_INS_CENTER 1 //中置GPS/INS组合导航\n\n//路径规划的原因\n#define ROUTE_REASON_VUI 0 //在VUI上启动规划\n#define ROUTE_REASON_OBU_RE_PATH 1 //OBU偏离路径,发生道路级重规划\n#define ROUTE_REASON_ADD_PLATOON 2 //OCT创建车队\n#define ROUTE_REASON_ADD_TAIL 3 
//在车队尾部追加车辆\n#define ROUTE_REASON_JOIN_OBU 4 //在车队中间追加车辆\n#define ROUTE_REASON_OCT_CALL_CAR 5 //召车\n#define ROUTE_REASON_ETS_BLOCK 6 //施工标志封闭全部道路\n#define ROUTE_REASON_HEAD_CAR 7 //头车更新路径\nconst char *route_reason_str(const int val);\n\n//启动自动驾驶的原因\n#define START_REASON_VUI 0 //在VUI上切换为自动驾驶\n#define START_REASON_CSU 1 //CSU启动自动驾驶(如:脚本)\n#define START_REASON_RSU 2 //RSU启动自动驾驶(暂未使用)\n#define START_REASON_PLATOON 3 //编队启动自动驾驶\n#define START_REASON_CALL_CAR 4 //召车启动自动驾驶\nconst char *start_reason_str(const int val);\n\n//退出自动驾驶的原因\n#define STOP_REASON_VUI 0 //在VUI上切换为人工驾驶\n#define STOP_REASON_CSU 1 //CSU退出自动驾驶(如:脚本)\n#define STOP_REASON_RSU 2 //RSU退出自动驾驶(如:编队)\n#define STOP_REASON_ARRIVED 3 //到达目的地,obu切换为人工驾驶\nconst char *stop_reason_str(const int val);\n\n//换道方向\n#define CL_DIRECTION_NONE 0 //保持当前车道\n#define CL_DIRECTION_LEFT 1 //向左换道\n#define CL_DIRECTION_RIGHT 2 //向右换道\nconst char *cl_direction_str(const int val);\n\n//换道原因\n#define CL_REASON_FORBIDEN -1 //不允许换道\n#define CL_REASON_HAND 0 //人工拨动转向摇杆要求换道\n#define CL_REASON_OBU_AVOIDING 1 //OBU避障,所以换道\n#define CL_REASON_ETS_BLOCK 2 //电子施工标志封闭部分车道,所以换道\n#define CL_REASON_TRAFFIC_JAM 3 //慢车阻塞我超过10秒,OBU发起换道\n#define CL_REASON_START 4 //起步时从低速道换到高速道\n#define CL_REASON_STOP 5 //停车时从高速道换到低速道\n#define CL_REASON_BROKEN_LANE 6 //在断头车道提前换到旁边的车道\n#define CL_REASON_PLATOON_DROP 7 //编队踢掉车辆\n#define CL_REASON_RSU_AVOIDING 8 //RSUÖž»ÓOBU±ÜÕÏ\n#define CL_REASON_OBU_RE_LANE 9 //OBUÆ«Àë·Ÿ¶£¬·¢ÉúµÀ·Œ¶Öع滮\n#define CL_REASON_PLATOON_STOP 10 //编队停车时从高速道换到低速道\n#define CL_REASON_PLATOON_JOIN 11 //编队中间加入车辆\nconst char *cl_reason_str(const int val);\n\n//换道状态\n#define CL_STATUS_REQUEST 0 //单车认为可以换道\n#define CL_STATUS_BLOCKED 1 //单车认为有障碍,需要网络通知相关车辆避让\n#define CL_STATUS_RUNNING 2 //单车正在换道\n#define CL_STATUS_COMPLETE 3 //单车换道完成\n#define CL_STATUS_CANCEL 4 //单车换道过程中被取消\n#define CL_STATUS_CANCEL_COMP 5 //单车换道取消后回本道完成\nconst char *cl_status_str(const int val);\n\n//下发中心线的类型\n#define IS_NOT_REPLAN 0 //不重规划\n#define IS_REPLAN 1 
//重规划\n\n//车辆默认行为\n//#define DRIVE_BEHAVIOR_OVERTAKE 0 //允许车辆自主换道超车\n//#define DRIVE_BEHAVIOR_FOLLOW 1 //车辆自身只能跟车,网络下发换道超车(默认取本值)\n\n\n#define CAR_ACTION_NONE 0 //无效值\n#define CAR_ACTION_SINGLE 1 //单车模式\n#define CAR_ACTION_PLATOON_HEAD 2 //车队头车\n#define CAR_ACTION_PLATOON_OTHER 3 //车队其他车辆\n\n#define HUMAN_DRIVING 1\n#define ASSIST_DRIVING 2\n#define AUTO_DRIVING 3\n\n//----------------------------------------------------------\n// oct、vui的枚举值\n//----------------------------------------------------------\n\n//csu管理oct的状态机\n#define E_CSU_SESSION_OCT_DISCONN 0 //未连接\n#define E_CSU_SESSION_OCT_CONNECT 1 //已连接\n\n//obu管理vui的状态机\n#define E_OBU_SESSION_VUI_DISCONN 0 //未连接\n#define E_OBU_SESSION_VUI_CONNECT 1 //已连接\n\n//日志等级\n#define LOG_INFO 0 //普通日志\n#define LOG_WARNING 1 //警告日志\n#define LOG_ERROR 2 //错误日志\n#define LOG_FATAL 3 //致命错误日志\nconst char *log_str(const int val);\n\n//告警等级\n#define ALARM_INFO 0 //提示告警(灰色)\n#define ALARM_ADVISE 1 //建议告警(绿色)\n#define ALARM_WARNING 2 //警告告警(黄色)\n#define ALARM_DANGER 3 //危险告警(红色)\nconst char *alarm_str(const int val);\n\n//告警类型\n#define ALARM_TYPE_OCCUR 0 //产生告警(在VUI开始显示)\n#define ALARM_TYPE_RESTORE 1 //恢复告警(在VUI停止显示)\n#define ALARM_TYPE_EVENT 2 //事件告警(在VUI显示5秒后自动隐藏)\n\n//告警展示类型\n#define ALARM_SHOW_TEXT 0 //文本\n#define ALARM_SHOW_SOUND 1 //语音\n#define ALARM_SHOW_TEXT_SOUND 2 //文本加语音\n\n//产生告警的距离\n#define ALARM_DIS_LONG 100 // <=100米\n#define ALARM_DIS_MIDDLE 50 // <=50米\n#define ALARM_DIS_SHORT 20 // <=20米\n\n//修改ETS的原因\n#define ETS_REASON_DIALOG 0 //通过对话框修改\n#define ETS_REASON_TASK 1 //通过脚本修改\n#define ETS_REASON_CROSSING 2 //通过路口算法修改\nconst char *ets_reason_str(const int val);\n\n\n//----------------------------------------------------------\n// 车辆CAN的枚举值\n//----------------------------------------------------------\n\n//发动机状态\n#define ENGINE_STATUS_STOPPED 0 //停止\n#define ENGINE_STATUS_STALLED 1 //熄火\n#define ENGINE_STATUS_RUNNING 2 //运行\n#define ENGINE_STATUS_CRANKING 3 //启动\n\n////档位状态\n//#define AT_STATUS_OFF 255\n//#define 
AT_STATUS_P 0\n//#define AT_STATUS_R 1\n//#define AT_STATUS_N 2\n//#define AT_STATUS_D 3\n//#define AT_STATUS_M 8\n//#define AT_STATUS_M1 9\n//#define AT_STATUS_M2 10\n//#define AT_STATUS_U 6\n//const char *at_status_str(const int val);\n\n//刹车信号\n#define BRAKE_STATUS_OFF 0 //未刹车\n#define BRAKE_STATUS_ON 1 //正在刹车\n\n//转向灯状态\n#define FLASHING_STATUS_NONE 0 //不亮\n#define FLASHING_STATUS_L 1 //左转向\n#define FLASHING_STATUS_R 2 //右转向\n#define FLASHING_STATUS_LR 3 //双闪\n\n//灯光状态\n#define BEAM_STATUS_OFF 0 //关灯\n#define BEAM_STATUS_ON 1 //开灯\n\n\n//----------------------------------------------------------\n// GPS、惯导的枚举值\n//----------------------------------------------------------\n\n//定位定姿状态,源自POS状态量的高2位\n#define GPS_FLAG_MA 0 //机械编排\n#define GPS_FLAG_KEEP 0 //保存整秒时刻状态\n#define GPS_FLAG_LC_BEGIN 1 //组合更新计算开始\n#define GPS_FLAG_LC_END 2 //组合解算完成\n\n//RTK状态,源自POS状态量的低6位\n#define GPS_N_SPP 0 //单点定位\n#define GPS_N_FLOAT 1 //浮动解\n#define GPS_N_FIXED 2 //固定解,精度最佳\n#define GPS_N_NG 3 //失败解\n\n//----------------------------------------------------------\n// xxx_session_obu中的platoon_flag的枚举值,还未用到\n//----------------------------------------------------------\n\n//车辆或编队的状态\n#define PLATOON_CONVERGE 0 //各个obu正在汇聚,未形成编队\n#define PLATOON_FIN 1 //obu已经在编队中\n#define PLATOON_DROP_OUT_PRE 2 //obu准备退出编队\n#define PLATOON_DROP_OUT 3 //obu正在退出编队\n#define PLATOON_JOIN 4 //obu正在加入编队\n#define VAHICLE_FREE 5 //obu是自由车辆\n\n//调用汽院的PlatoonControl的返回值\n#define PLATOON_RET_DOING 0 //当前状态未执行完\n#define PLATOON_RET_OK 1 //执行完了\n\n//坐标转换\nconst double pi = 3.14159265358979324;\nconst double a = 6378245.0;\nconst double ee = 0.00669342162296594323;\nconst double x_pi = 3.14159265358979324 * 3000.0 / 180.0;\n\nconst double car_follow_dis = 12;\nconst double car_out_dis = 15;\n\n#define MAX_YAW 50.0 //头指向超过60度即可判断为弯道\n\nconst int SPEED_LOW = 5;\nconst int DISTANCE_IDLE = 10;\nconst double ACCELARATION = 0.4;\n\n#endif\n"
},
{
"alpha_fraction": 0.5182849764823914,
"alphanum_fraction": 0.5245901346206665,
"avg_line_length": 32.04166793823242,
"blob_id": "d4123de3f7e4bd71f37050140c8ce76cbf2cb1b9",
"content_id": "bbfcff03d202eaf06ec14462c9d496dd1607352a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 793,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 24,
"path": "/athena/core/x86/Planning/include/vehicle_dynamic/steering_angle.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <math.h>\n#include \"common/math_util.h\"\n#include \"common/point.h\"\n#include \"common/path.h\"\n#include \"vehicle_dynamic/heading.h\"\n#include \"spline/spline.h\"\n\ndouble cau_ks_by_3_points( double x[3], double y[3] );\n\ndouble cau_steering_angle_from_ks(tk::spline s_x, tk::spline s_y,\n double cs, double Ux, double& rr,\n double every1, double every2);\n//\ndouble cau_steering_angle_from_ks(tk::spline s_x,\n tk::spline s_y,\n double cs,\n double Ux,\n double& rr);\n\ndouble calculate_sign_and_value_offset(\n double x, double y, double yaw,\n double x_c, double y_c, double yaw_c);\n"
},
{
"alpha_fraction": 0.425355464220047,
"alphanum_fraction": 0.4360189437866211,
"avg_line_length": 18.18181800842285,
"blob_id": "0aff07975afa06bb7bb839c93236a66aeaf8b84b",
"content_id": "bb9f3f7d10d61e3a9a5629534b8913c2dc16ad06",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1964,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 88,
"path": "/athena/core/arm/Planning/include/planning/planning_output.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 局部规划的输出信息。\n */\n\n #ifndef _PLANNING_OUTPUT_H\n #define _PLANNING_OUTPUT_H\n\n#include <vector>\n#include \"common/navi_point.h\"\n\n/**\n * @class OutTrajectory\n * @brief motion层生成的轨迹信息(导航精确控制点的序列),最终下发给controller。\n */\nclass OutTrajectory\n{\n public:\n double gps_time_; ///<GPS时间\n\n int num_of_points_; ///<点的个数\n\n int type_; ///<轨迹类型\n\n double reserved_; ///<预留位\n\n std::vector< navi_point > points_; ///<导航精确控制点的序列\n\n int car_action_; ///<1表示单车模式,2表示车队头车模式,3表示车队跟随车模式\n\n int driving_mode_; ///<0表示人工驾驶,1表示自动驾驶\n\n public:\n /**\n * @brief 构造函数\n */\n OutTrajectory()\n {\n gps_time_ = 0.0;\n num_of_points_ = 0;\n type_ = 0;\n reserved_ = 0.0;\n points_.clear();\n car_action_ = 0;\n driving_mode_ = 0;\n }\n /**\n * @brief 析构函数\n */\n virtual ~OutTrajectory()\n {\n\n }\n};\n\n\n/**\n * @class LongitudinalControlInfo\n * @brief motion下发给controller的纵向信息。\n */\nclass LongitudinalControlInfo\n{\n public:\n double tar_speed_; ///<目标速度,单位m/s\n\n double tar_accelerate_; ///<目标加速度,单位m/s2\n\n\n public:\n /**\n * @brief 构造函数\n */\n LongitudinalControlInfo()\n {\n tar_speed_ = 0.0;\n tar_accelerate_ = 0.0;\n }\n /**\n * @brief 析构函数\n */\n virtual ~LongitudinalControlInfo()\n {\n\n }\n};\n\n#endif //_PLANNING_OUTPUT_H\n"
},
{
"alpha_fraction": 0.6218338012695312,
"alphanum_fraction": 0.6304165720939636,
"avg_line_length": 25.98870086669922,
"blob_id": "79fde2efba4f4aa0f42e6e56d9a516e4aa910290",
"content_id": "11d971e80e448ca98fc3569b75001c827aedca2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9580,
"license_type": "no_license",
"max_line_length": 167,
"num_lines": 354,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneRegion.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "WINDOWS-1252",
"text": "#ifndef _LANE_REAGION_H_\n#define _LANE_REAGION_H_\n\n#include \"../utils/type.h\"\n#include \"../utils/config.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerPairs.h\"\n#include \"LaneParameter.h\"\n#include \"LaneParameterOneSide.h\"\n#include \"LaneSide.h\"\n\n\n\nextern double g_dbOffsetSearchMarginSequence ; // ??¨CT???¨¹???E??¡ë¡°?????¨¹???E?¨¬¡ª¨¬?????E???¨¬???¨º?¨¬?¨C¡ªe¡¯l // 20110703\nextern double g_dbYawSearchMarginSequence ; // 20110702\nextern double g_dbOffsetSearchMarginTrack ; // 20111014\nextern double g_dbYawSearchMarginTrack ;\n\nextern double g_dbOffsetSearchMarginForComplexLaneBoundary ; // 20111202\n\nclass LaneRegion\n{\nprivate:\n double _dPos[2][2];\n int *_piProcLine;\n int _iProcLineNumber;\n int _iProcLineWidth;\n unsigned char *_pcProcLineImage;\n LaneSide _Side[LR_NUM];\n //LaneMarkerPair\t_aLaneMarkerPairCandidate[CS4_LANES];\n int _iLaneMarkerPairNumber;\n BOOL _bAvailable;\n int _iRefLaneWidth;\n int _iRefLaneWidth_a;\n int _iRefLaneWidth_p;\n BOOL _bCrossLineClearFlag;\n\n //////////////////////////////////////////////////////////////////////////\n LaneMarkerPairs *_pLaneMarkerPairs,*_pLaneMarkerMul;\n\n int _iLineNumber;\n\n#define\tMAX_EDGE_POINT_NUM\t1000\n int _aUpEdgeOnInputlines[MAX_EDGE_POINT_NUM][2];\n int _aDownEdgeOnInputlines[MAX_EDGE_POINT_NUM][2];\n int _aiUpEdgeOnRoad[MAX_EDGE_POINT_NUM][2];\n int _aiDownEdgeOnRoad[MAX_EDGE_POINT_NUM][2];\n int _iUpEdgePointNumber;\n int _iDownEdgePointNumber;\n\n int *_piUpEdgeOnOneline;\n int *_piDownEdgeOnOneline;\n\n int _iFoundCounter;\n int _iLostCounter;\n\n BOOL _bFoundNow;\n BOOL _bFound;\n\npublic:\n LaneRegion(void);\n inline ~LaneRegion(void)\n {\n SAFE_DELETE(_pLaneMarkerPairs);\n }\n void initialize(void);\n inline double Top(void)\n {\n return _dPos[0][0];\n }\n inline double Left(void)\n {\n return _dPos[0][1];\n }\n inline double Bottom(void)\n {\n return _dPos[1][0];\n }\n inline double Right(void)\n {\n return _dPos[1][1];\n }\n inline void 
Top(double dV)\n {\n _dPos[0][0] = dV;\n }\n inline void Left(double dV)\n {\n _dPos[0][1] = dV;\n }\n inline void Bottom(double dV)\n {\n _dPos[1][0] = dV;\n }\n inline void Right(double dV)\n {\n _dPos[1][1] = dV;\n }\n inline int ProcLineNumber(void)\n {\n return _iProcLineNumber;\n }\n inline void ProcLineNumber(int iV)\n {\n _iProcLineNumber = iV;\n }\n inline int &ProcLineWidth(void)\n {\n return _iProcLineWidth;\n }\n inline void ProcLine(int *p)\n {\n SAFE_DELETE(_piProcLine);\n _piProcLine = p;\n }\n inline int *ProcLine(void)\n {\n return _piProcLine;\n }\n inline unsigned char *ProcLineImage(void)\n {\n return _pcProcLineImage;\n }\n BOOL calcProcLine(PARAM_CAM *pCamParam, int iMaxLine);\n int calcProcLine(PARAM_CAM *pCamParam, int iSrcMin, int iSrcMax, int iMaxNumber);\n inline void clearUpEdgePointNumber(void)\n {\n _iUpEdgePointNumber = 0;\n }\n inline void clearDownEdgePointNumber(void)\n {\n _iDownEdgePointNumber = 0;\n }\n inline int &UpEdgePointNumber(void)\n {\n return _iUpEdgePointNumber;\n }\n inline int &DownEdgePointNumber(void)\n {\n return _iDownEdgePointNumber;\n }\n inline int &UpEdgeOnInputlinesI(int iIdx)\n {\n return _aUpEdgeOnInputlines[iIdx][0];\n }\n inline int &UpEdgeOnInputlinesJ(int iIdx)\n {\n return _aUpEdgeOnInputlines[iIdx][1];\n }\n inline int &DownEdgeOnInputlinesI(int iIdx)\n {\n return _aDownEdgeOnInputlines[iIdx][0];\n }\n\n inline LaneMarkerPairs *getLaneMarkerPairs(void)\n {\n return _pLaneMarkerPairs;\n }\n inline void setLaneMarkerPairs(LaneMarkerPairs *p)\n {\n SAFE_DELETE(_pLaneMarkerPairs);\n _pLaneMarkerPairs = p;\n }\n\n inline int &DownEdgeOnInputlinesJ(int iIdx)\n {\n return _aDownEdgeOnInputlines[iIdx][1];\n }\n\n BOOL transformInputToRoadImage(PARAM_CAM *pCamParam, int iIsrc, int iJsrc, int *piIdst,int *piJdst);\n\n inline int checkUpEdgeOnOneline(int j)\n {\n return _piUpEdgeOnOneline[j];\n }\n inline void setUpEdgeOnOneline(int j)\n {\n _piUpEdgeOnOneline[j] = 1;\n }\n inline void clearUpEdgeOnOneline(int 
j)\n {\n _piUpEdgeOnOneline[j] = 0;\n }\n inline int\tcheckDownEdgeOnOneline(int j)\n {\n return _piDownEdgeOnOneline[j];\n }\n inline void setDownEdgeOnOneline(int j)\n {\n _piDownEdgeOnOneline[j] = 1;\n }\n inline void clearDownEdgeOnOneline(int j)\n {\n _piDownEdgeOnOneline[j] = 0;\n }\n\n BOOL setDownEdgeOnInputlines(int iIsrc, int iJsrc);\n BOOL setUpEdgeOnInputlines(int iIsrc, int iJsrc);\n inline BOOL setDownEdgeOnRoad(int iIsrc, int iJsrc)\n {\n if(_iDownEdgePointNumber >= MAX_EDGE_POINT_NUM)\treturn FALSE;\n _aiDownEdgeOnRoad[_iUpEdgePointNumber][0] = iIsrc;\n _aiDownEdgeOnRoad[_iUpEdgePointNumber][1] = iJsrc;\n _iDownEdgePointNumber++;\n return TRUE;\n }\n inline BOOL setUpEdgeOnRoad(int iIsrc, int iJsrc)\n {\n if(_iUpEdgePointNumber >= MAX_EDGE_POINT_NUM)\treturn FALSE;\n _aiUpEdgeOnRoad[_iUpEdgePointNumber][0] = iIsrc;\n _aiUpEdgeOnRoad[_iUpEdgePointNumber][1] = iJsrc;\n _iUpEdgePointNumber++;\n return TRUE;\n }\n int calcEdgeStrength(PARAM_CAM *pCamParam, unsigned char *pucBuf, int iIsrc, int iJsrc);\n void linefilter_DualEdge_sub(PARAM_CAM *pCamParam, int iIdx, int iJsrcMin, int iJsrcMax, unsigned char *pucBuf);\n void linefilter_DualEdge_SelectEdgePoint(PARAM_CAM *pCamParam, int iIsrc);\n void linefilter_DualEdge(PARAM_CAM *pCamParam, int iIdx, unsigned char *pucBuf);\n void linefilterForAllProclines(void);\n inline LaneSide *Side(int iLR)\n {\n return &(_Side[iLR]);\n }\n//\tinline LaneMarkerPair\t&LaneMarkerPairCandidate(int iIdx)\t{\treturn _aLaneMarkerPairCandidate[iIdx];\t}\n inline int LaneMarkerPairNumber(void)\n {\n return _iLaneMarkerPairNumber;\n }\n inline BOOL &available(void)\n {\n return _bAvailable;\n }\n inline int &RefLaneWidth(void)\n {\n return _iRefLaneWidth;\n }\n inline int &RefLaneWidth_a(void)\n {\n return _iRefLaneWidth_a;\n }\n inline int &RefLaneWidth_p(void)\n {\n return _iRefLaneWidth_p;\n }\n inline BOOL &CrossLineClearFlag(void)\n {\n return _bCrossLineClearFlag;\n }\n\n inline int getFoundCounter(void)\n {\n return 
_iFoundCounter;\n }\n inline void clearFoundCounter(void)\n {\n _iFoundCounter = 0;\n }\n inline void incFoundCounter(void)\n {\n _iFoundCounter++;\n }\n inline int getLostCounter(void)\n {\n return _iLostCounter;\n }\n inline void clearLostCounter(void)\n {\n _iLostCounter = 0;\n }\n inline void incLostCounter(void)\n {\n _iLostCounter++;\n }\n\n inline BOOL getFoundNowFlag(void)\n {\n return _bFoundNow;\n }\n inline void clearFoundNowFlag(void)\n {\n _bFoundNow = FALSE;\n }\n inline void setFoundNowFlag(void)\n {\n _bFoundNow = TRUE;\n }\n\n inline BOOL getFoundFlag(void)\n {\n return _bFound;\n }\n inline void clearFoundFlag(void)\n {\n _bFound = FALSE;\n }\n inline void setFoundFlag(void)\n {\n _bFound = TRUE;\n }\n\n inline void clearBallotBox(void)\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n for(int iUD = 0; iUD < UD_NUM; iUD++)\n {\n Side(iLR)->BB(iUD)->clear();\n }\n }\n }\n void linefilterForAllProclines(PARAM_CAM *pCamParam);\n void voteForAllProclines(PARAM_CAM *pCamParam);\n void vote(PARAM_CAM *pCamParam);\n void vote(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints);\n inline void BB_diff(void)\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n LaneSide *pSide = Side(iLR);\n if(pSide == NULL)\tcontinue;\n pSide->BB_diff();\n }\n }\n void searchLaneMarkerLines(void);\n void pairLaneMarkerLines(PARAM_CAM *pCamParam, Uchar *pucInputImage, int aiTopIntensity[]);\n void pairLaneMarkers(void);\n BOOL pickupLaneMarkerPointsOfLaneMarkerLines(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, int iIsrcMin, int iIsrcMax);\n BOOL calc3DLineOfLaneMarkerLines(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints);\n BOOL calcAverageEdgeStrengthOfLaneMarkerLines(void);\n BOOL eliminateShortLaneMarkerLines(PARAM_CAM *pCamParam);\n BOOL eliminateFewPointLaneMarkerLines(PARAM_CAM *pCamParam);\n\n BOOL searchLaneBoundary(LaneParameter *p);\n BOOL 
pickupLaneBoundaryPoints(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, int iIsrcMin, int iIsrcMax);\n\n inline int LineNumber(void)\n {\n return _iLineNumber;\n }\n inline void LineNumber(int iV)\n {\n _iLineNumber = iV;\n }\n inline void calcLineNumber(PARAM_CAM *pCamParam)\n {\n int iIsrcMin = calcHorizontalLineOfDepth(pCamParam, Top());\n int iIsrcMax = calcHorizontalLineOfDepth(pCamParam, Bottom());\n\n int iLineNumber = iIsrcMax - iIsrcMin + 1;\n LineNumber(iLineNumber);\n }\n BOOL checkComplexLaneBoundary(LaneParameterOneSide *pLPLeft, LaneParameterOneSide *pLPRight);\n};\n#endif // _LANEAREA_\n"
},
{
"alpha_fraction": 0.5675675868988037,
"alphanum_fraction": 0.5692055821418762,
"avg_line_length": 20.803571701049805,
"blob_id": "7fb54eaf301c0c505dda84e2a12a8184cfcad0a0",
"content_id": "d98f8a195bbd5404e6e6a669766520e84e1818c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1221,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 56,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerPair.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include \"../utils/type.h\"\n#include \"../utils/config.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarker.h\"\n\nclass LaneMarkerPair\n{\nprivate:\n LaneMarker\t*_pLaneMarker[LR_NUM];\n\npublic:\n inline LaneMarkerPair(void)\n {\n for(int iIdx = 0; iIdx < LR_NUM; iIdx++)\n {\n _pLaneMarker[iIdx]\t=\tNULL;\n }\n }\n inline ~LaneMarkerPair(void)\n {\n for(int iIdx = 0; iIdx < LR_NUM; iIdx++)\n {\n SAFE_DELETE(_pLaneMarker[iIdx]);\n }\n }\n inline LaneMarker\t*Left(void)\n {\n return _pLaneMarker[LR_LEFT];\n }\n inline LaneMarker\t*Right(void)\n {\n return _pLaneMarker[LR_RIGHT];\n }\n inline void\tLeft(LaneMarker *p)\n {\n SAFE_DELETE(_pLaneMarker[LR_LEFT]);\n _pLaneMarker[LR_LEFT] = p;\n }\n inline void\tRight(LaneMarker *p)\n {\n SAFE_DELETE(_pLaneMarker[LR_RIGHT]);\n _pLaneMarker[LR_RIGHT] = p;\n }\n inline LaneMarker\t*getLaneMarker(int iLR)\n {\n return _pLaneMarker[iLR];\n }\n inline void getLaneMarker(int iLR, LaneMarker *p)\n {\n SAFE_DELETE(_pLaneMarker[iLR]);\n _pLaneMarker[iLR] = p;\n }\n};\n\ntypedef LaneMarkerPair * ptrLaneMarkerPair;\n"
},
{
"alpha_fraction": 0.6044283509254456,
"alphanum_fraction": 0.6180241107940674,
"avg_line_length": 25.539518356323242,
"blob_id": "14e1364669c57e199bcb7a77ee90f84164f0e520",
"content_id": "c74968a0cb6fecd83b6cf1c8b7204ffea2bbcae7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 10263,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 291,
"path": "/athena/core/x86/Navi/include/route_data.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n* @file route_data.h\n* @brief 导航模块的公用数据\n* @details 包含对点、规划模块数据等描述\n* @author huanhuan\n* @date 2018/7/16\n* @version v1.0\n* @par Copyright (c):\n* 武汉环宇智行科技有限公司\n* @par History:\n* version: author, date, desc\\n\n*/\n#ifndef _ROUTE_DATA_H_\n#define _ROUTE_DATA_H_\n\n#include \"heading.h\"\n#include \"math_util.h\"\n#include \"config/Config.h\"\n#include \"MapInterface.h\"\n#include \"nad_retcode.h\"\n#include \"nad_enum.h\"\n\n\nnamespace athena\n{\nnamespace route\n{\n//typedef boost::graph_traits<Graph>::edge_iterator EdgeIterator;\ntypedef std::pair<double,double> xy_point;\nconst double RADIANS_PER_LSB = 0.0174532925;\n\n\n/**\n* @brief coord_transfer,完成经纬度和高斯投影面xy之间坐标的转换。\n* 包含原点的设定\n*/\nclass coord_transfer\n{\npublic:\n LocalGeographicCS cs;\n\n coord_transfer();\n coord_transfer(std::string filename);\n coord_transfer(double lat, double lon);\n ~coord_transfer();\n\n void set_origin(std::string filename);\n void set_origin(double lat, double lon);\nprivate:\n Config configSettings;\n double origin_lat = 0;///<投影原点纬度.\n double origin_lon = 0;///<投影原点经度.\n\n\n};\n\n\n/**\n * @brief 路径规划的公共结构\n * 地图上的关键点(但地图本身并不存关键点)\n */\nclass key_point\n{\npublic:\n double lon; ///<经度\n double lat; ///<纬度\n double yaw; ///<角度\n int32_t type; ///<关键点类型,枚举值KP_XXX,忽略则取值KP_UNKNOWN=0\n};\n\n\n/**\n* @brief 投影坐标系下x,y点\n*/\nstruct Point_m\n{\n double x;\n double y;\n};\n\n/**\n* @brief 形点边界线(专供planning使用)\n* 约束可行驶区域,道路边界等\n*/\nclass point_xys\n{\npublic:\n int8_t type; ///<点类型,依业务类型而定\n float x; ///<x坐标,单位m\n float y; ///<y坐标,单位m\n};\n\n/**\n * 路径下发消息\n * 中路中心上的点(专供motion使用)\n */\nclass point_m\n{\npublic:\n int8_t type; ///<点类型: KP_NONE=禁行点, KP_NORMAL=一般点, KP_CHANGE_LANE_LEFT=向左换道点, KP_CHANGE_LANE_RIGHT=向右换道点\n float x; ///<x坐标,单位m\n float y; ///<y坐标,单位m\n float yaw; ///<道路头指向(相对正北的夹角)\n float k; ///<道路曲率,单位deg/m\n float mileage; ///<相对起点的里程,单位m\n float width; ///<道路宽度,单位m\n int8_t sug_speed;///<推荐速度,单位km/h\n};\n\n/// 以左侧第一车道头指向的垂线截取的道路切片(专供motion使用)\nclass 
section_m\n{\npublic:\n int8_t num_of_lane; ///<切片上的道路数量\n std::vector< point_m > lane; ///<每个切片上的道路中点\n};\n\n/**\n* @brief 形点边界线(专供planning使用)\n* 约束可行驶区域,道路边界等\n*/\nclass line_xys\n{\npublic:\n int32_t num_of_point; ///<点的数量\n std::vector<point_xys> line; ///<点集\n};\n\n/**\n* @brief 规划出来的道路中线点\n* 包含每个点的坐标、里程、所在车道的id等\n*/\nclass center_point\n{\npublic:\n double x; ///<相对原点的x坐标,单位m,可和经纬度转化\n double y; ///<相对原点的y坐标,单位m,可和经纬度转化\n int32_t index; ///<在center_point_list中的下标\n double yaw; ///<头指向\n double width; ///<路宽\n double mileage; ///<相对起点的里程,单位m\n int64_t lane_id; ///<当前lane的id\n int8_t type[MAX_KP_TYPE]; ///<枚举值KP_XXX\n int32_t left_lane_num; ///<左边车道的数目\n int32_t right_lane_num;///<右边车道的数目\n std::string id; ///<key_point专用,索引对象,例如light_id,无需索引的填\"\"\n int32_t road_level; ///<指定道路等级\n\npublic:\n center_point(); ///<构造函数\n double dist(center_point &cp);///<距离某个点的距离\n};\n\n///用于依据limspeed_id在vector<limspeed> limspeed_list中查找对应的limspeed\nclass LimspeedInLimspeedList {\npublic:\n LimspeedInLimspeedList(const std::string &id)\n {\n this->id = id;\n }\n bool operator () (const athena::roadmap::limspeed& limspeed_t) {\n return (limspeed_t.limspeed_id == id)?true:false;\n }\nprivate:\n std::string id;\n};\n\n\n/**\n* @brief 路径规划\n* 包含导航规划所经过的车道序列以及关键点等\n*/\nclass route_planning\n{\npublic:\n int16_t num_of_lane; ///<路径节点数量\n std::vector< athena::roadmap::lane > lane_list; ///<路径节列列表\n int16_t num_of_kp; ///<关键点数量\n std::vector< center_point > key_point_list;///<关键点列表,不包含KP_NONE=-1的点,包含规划信息(所以没有使用key_point结构)\n int64_t time_stamp; ///<产生此路径规划的时间,gettimeofday获得的毫秒数\n std::string destination; ///<目的地名称\n int32_t route_reason; ///<路径规划原因,参考枚举值:ROUTE_REASON_XXX,可用来区分初始规划/重规划、车道规划/道路规划\n};\n\n\n/**\n* @brief 路径规划(专供planning使用)\n* 包含导航规划所经过的车道序列以及关键点等\n*/\nclass route_planning_m\n{\npublic:\n ///<以车辆当前位置(cur_section)为原点,分段规划信息\n int64_t time_stamp; ///<产生此路径规划的时间,gettimeofday获得的毫秒数\n std::string destination; ///<规划的目的地\n int32_t route_reason; ///<规划原因\n float mileage_pass; 
///<相对车过去的里程,单位m,默认500m\n float mileage_next; ///<相对车未来的里程,单位m,默认1000m\n float mileage_start; ///<距离起点的里程,单位m\n float mileage_stop; ///<距离终点的里程,单位m\n ///<可行驶区域 left_edge;right_edge;需要在地图中指明\n line_xys left_edge; ///<左边界线,超出此线可能撞马路牙子\n line_xys right_edge; ///<右边界线,超出此线可能撞马路牙子\n line_xys left_line; ///<最左车道的左边线,超出此线可能逆行\n line_xys right_line; ///<最右车道的右边线,通常right_line和right_edge之间为停车带\n ///<中线:按车辆行驶方向的每个切片的中点\n int32_t num_of_section; ///<这段道路上的切片数量\n std::vector< section_m > line; ///<切片列表\n int32_t cur_section; ///<规划时车在哪个切片上\n};\n#define MILEAGE_PASS 500 ///<每次切片下发已走里程,单位:m\n#define MILEAGE_NEXT 1000 ///<每次切片下发未走里程,单位:m\n\nclass Route_Behavior\n{\npublic:\n ///基于关键点的行为\n std::vector<center_point> center_line_; ///<最左边车道的中心线 相当于道路级的规划 用于与道路级事件匹配\n std::vector<center_point> key_points_; ///<从起点到终点的关键点,道路级事件\n std::vector<roadmap::limspeed> limspeed_list_; ///限速牌列表\n std::vector<roadmap::lane> lanelist_;\n\n int32_t cur_center_line_index_; ///<车辆当前在center_line[cur_center_line_index]附近\n int32_t cur_key_point_index_; ///<车辆当前在key_points[cur_key_point_index]附近\n center_point cur_point_; ///<在route中的当前点,数据来自center_line[cur_center_line_index]\n int32_t limspeed_value; //限速值,1000表示没有限速牌,单位:km/h\n\n ///把center_line中的关键点保存到key_ponts中\n void get_keypoint_on_center_line();\n\n ///获得重规划是需要的key_point(只需要当前点、终点、NORMAL点,不要ets、障碍物、换道点)\n void get_keypoint_for_replanning(std::vector<center_point> &key_point_list);\n\n ///获得规划是需要的key_point(只需要起点、终点、NORMAL点,不要ets、障碍物、换道点)\n void get_keypoint_for_planning(std::vector<center_point> &key_point_list);\n\n ///删除可以动态绑定的关键点(只保留起点、终点、NORMAL点)\n void clear_active_keypoint_on_center_line();\n\n ///查找下一个关键点\n center_point *get_next_kp(int type1, int type2 = KP_NONE,\n int type3 = KP_NONE, int type4 = KP_NONE, int type5 = KP_NONE, int type6 = KP_NONE);\n\n ///查找上一个关键点\n center_point *get_pre_kp(int type1, int type2 = KP_NONE,\n int type3 = KP_NONE, int type4 = KP_NONE, int type5 = KP_NONE, int type6 = KP_NONE);\n\n ///从头开始全程查找关键点\n center_point 
*get_kp(int type1, int type2 = KP_NONE,\n int type3 = KP_NONE, int type4 = KP_NONE, int type5 = KP_NONE, int type6 = KP_NONE);\n\n ///查找以cur_point为原点,前后多少米的点,负数为前(向起点),整数位后(向终点)\n center_point *get_cp(double diff);\n\n ///查找下一个停止点\n center_point *get_next_stop_kp(int right_of_way);\n\n ///查找下一个换道点,返回CL_DIRECTION_LEFT/CL_DIRECTION_RIGHT\n center_point *get_next_change_kp(int &direction);\n\n ///查找当前点是否禁止换道\n bool get_current_forbid_change();\n\n ///获得起点指针\n center_point *start_kp();\n\n ///获得终点指针\n center_point *stop_kp();\n\n ///获得到起点的距离(起点之前是负数,起点之后是正数,cp==NULL则用cur_point)\n double start_dist(center_point *cp = NULL);\n\n ///获得到终点的距离(终点之前是正数,终点之后负正数,cp==NULL则用cur_point)\n double stop_dist(center_point *cp = NULL);\n\n ///设置lanelist\n void set_lanelist(std::vector<roadmap::lane> &lanelist);\n\n ///将限速牌绑定到中心线\n int bind_limspeed_on_center_line(std::string limspeed_id, int64_t lane_id, double &dis_min);\n\n ///查找当前生效的限速点,同时修改了limspeed_value,返回生成速度的limspeed_id或lane_id\n int get_current_limspeed(std::string &id, double diff = 0.0);\n\n ///查找限速牌\n roadmap::limspeed *find_limspeed(std::string limspeed_id);\n};\n}\n}\n\n#endif // _ROUTE_DATA_H_\n"
},
{
"alpha_fraction": 0.5122448801994324,
"alphanum_fraction": 0.5326530337333679,
"avg_line_length": 16.464284896850586,
"blob_id": "601c8e88df5de9eb4b39e99937f5fb26df1af1f3",
"content_id": "9177583f5328e2ba1c63398c29d5561cdca45a50",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 512,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 28,
"path": "/athena/core/arm/Control/include/common/map_matching/circle.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#pragma once\n\n#include <math.h>\n#include <vector>\n\n//#include \"navi_point.h\"\n\nusing namespace std;\n\n// 点的位置\nclass circle\n{\npublic:\n // 传感器相对坐标\n double R;\n double ks;\n\n};\n\n int get_circle(double x1, double y1,\n double x2, double y2,\n double x3, double y3,\n double& x, double& y,\n double& r, double& ks);\n\n int sign_circle_e(double x, double y,\n double x1, double y1,\n double x2, double y2);\n\n"
},
{
"alpha_fraction": 0.6162683963775635,
"alphanum_fraction": 0.625,
"avg_line_length": 27.63157844543457,
"blob_id": "53620bd72b520a6beda8201a0694d2d5203a9df4",
"content_id": "b763a84886dc88aac46b7fd8ed82dc5b1680bc98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2664,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 76,
"path": "/athena/core/x86/Camera/vision_ssd_detect/include/ssd_detection/vision_detect_node.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/// //////////////////////////////////////////////\n///@file 加载caffe-ssd网络模型和权重文件\n///@brief 读取图片方法 目标框列表 相机自身配置参数\n///@author duohaoxue\n///@version v1.0\n///@date 2018.07.17\n/// //////////////////////////////////////////////\n#pragma once\n#include \"ssd_detection/vision_detector.hpp\"\n#include \"ssd_detection/camera_obj_list.hpp\"\n#include \"ssd_detection/distance_calculation.hpp\"\nusing namespace caffe;\nusing namespace std;\n\nclass Visiondetect\n{\npublic:\n ///卷积特征目标检测 Caffe based Object Detection ConvNet\n\tDetector* ssd_detector_;\n\t///是否用GPU Sets whether or not use GPU acceleration\n\tbool use_gpu;\n\t///选用GPU的id 编号 If GPU is enabled, stores the GPU Device to use\n\tunsigned int gpu_device_id;\n /// 输入均值文件\n string mean_file_;\n /// 图片均值像素\n string pixel_mean_;\n ///网络模型文件\n string network_Model ;\n /// 训练权重文件\n string pretrained_Weights ;\n\n string camera_config_file;\n /// 加载配置参数\n void read_config_value_from_file();\n /** 卷积特征转化为图片目标框\n * @param p1 输入需检测图片\n * @param p2 输出检测后的目标物结果\n **/\n void convert_rect_to_image_obj(cv::Mat &image,vector<camera_obj> &out_camera_objs);\n\n ~Visiondetect();\n\n\t Visiondetect(string config_file);\nprivate:\n ///计算相机到目标物体距离\n Calculate_distance cal_distance;\n ///过滤所需置信度最低阀值 The minimum score required to filter the detected objects by the ConvNet\n\tfloat min_score_threshold;\n /// 最小非极大值抑制\n\tfloat nms_threshold;\n /// 是否显示出BoundingBox\n bool draw_flag;\n /// 是否跟踪\n bool use_track;\n /** 画出图像目标框\n * @param p1 输入显示图片\n * @param p2 输入检测后的矩形框\n **/\n void Draw_obj_from_image(cv::Mat &image,std::vector<camera_obj> &get_camera_objs);\n /** 滤除误识别目标框\n * @param p1 输入所需滤除目标\n **/\n std::vector<camera_obj> filter_get_objs(std::vector<camera_obj> &get_camera_objs);\n /** 滤除重叠目标框,保留最大框 非极大值抑制\n * @param p1 跟踪后预测目标\n * @param p2 最低非极大值抑制值\n **/\n void ApplyNonMaximumSuppresion( std::vector<camera_obj> &in_source, float in_nms_threshold);\n /** 基于置信度由高到低排序\n * @param p1 输入检测后置信度\n * @param p2 相对应分数索引号\n **/\n void 
Sort(const std::vector<float> in_scores, std::vector<unsigned int>& in_out_indices);\n\n};\n"
},
{
"alpha_fraction": 0.5577213764190674,
"alphanum_fraction": 0.5905107259750366,
"avg_line_length": 21.664382934570312,
"blob_id": "c6c506c4cb63ab40e25fded356af029e7999e9a5",
"content_id": "c8c730806897310ae3600964823cbc5189a425bd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 8116,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 292,
"path": "/athena/examples/LCM/Singlecar/control/apps/control.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file control.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef APPS_CONTROL_H_\n#define APPS_CONTROL_H_\n\n#include \"message_manger/message_manger.h\"\n#include \"message_manger/lcm/lcm_message_manger.h\"\n#include \"../common/logging.h\"\n#include \"../common/Config.h\"\n#include \"../common/enum.h\"\n#include \"../common/control_info_report.h\"\n#include \"../common/color_init.h\"\n#include \"../control_logic/control_logic.h\"\n#include \"../control_logic/control_logic_config.h\"\n#include \"controller.h\"\n#include \"track_trajectory/track_trajectory.h\"\n#include \"control_debug/control_debug.h\"\n\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class Control\n * @brief 控制类.\n */\nclass Control:public Thread{\n public:\n Control(std::string software_version);\n ~Control() = default;\n\n /**\n * @brief 程序执行入口.\n * @return void.\n */\n void Init();\n\n\n private:\n ///软件版本\n std::string software_version_;\n ///循迹\n TrackTrajectory *track_trajectory_;\n ///配置文件路径\n const std::string config_file_path_ = \"config/control.cfg\";\n ///配置文件操作类\n Config *main_config_file_;\n ///车辆配置文件操作类\n Config *vehicle_config_file_;\n ///车辆配置文件路径\n std::string vehicle_config_file_path_;\n ///控制逻辑设置\n ControlLogicConfig control_logic_config_;\n ///LCM消息控制器\n LcmMessageManger lcm_message_manger_;\n ///消息控制器\n MessageManger *message_manger_;\n ///控制命令\n ControlCmd control_cmd_;\n ///BCM控制命令\n BcmControlCmd bcm_control_cmd_;\n ///控制信息\n ControlInfoReport control_info_report_;\n ///控制逻辑-\n ControlLogic control_logic_;\n ///控制逻辑调试输出\n ControlLogicDebugOutput control_logic_debug_output_;\n ///消息控制器注册\n void ResigerMessageManger(MessageManger *message_manger);\n\n\n /**\n * @brief 发送控制输出.\n * @return void.\n */\n void PublishControlOutputOnTimer();\n /**\n * @brief 读配置文件\n * @param[in] file_path 配置文件路径..\n * @return void.\n 
*/\n void ReadConfigFile();\n\n /**\n * @brief 控制消息发布.\n * @param[in] control_cmd 控制输出.\n * @return void.\n */\n void PublishControlCmd(ControlCmd control_cmd);\n\n /**\n * @brief BCM控制信息发送.\n * @param[in] bcm_control_cmd BCM控制信息.\n * @return void.\n */\n void PublishBcmControlCmd(BcmControlCmd bcm_control_cmd);\n\n /**\n * @brief 控制消息上报.\n * @param[in] control_info_report 控制信息.\n * @return void.\n */\n void PublishControlInfoReport(ControlInfoReport control_info_report);\n\n /**\n * @brief 打印调试.\n * @return void.\n */\n void print();\n\n /**\n * @brief 日志调试.\n * @return void.\n */\n void log();\n\n public:\n ///调试开关\n int32_t debug_enable_;\n ///打印记录开关\n int32_t print_enable_;\n ///日志记录开关\n int32_t log_enable_;\n ///日志打印周期\n int32_t debug_print_period_;\n ///日志记录周期\n int32_t debug_log_period_;\n\n private:\n ///GUI调试\n int32_t gui_enable_;\n\n ///循迹\n int32_t init_map_from_file_;\n ///本地轨迹路径\n std::string local_trajectory_path;\n\n ///消息类型\n int32_t message_type_;\n\n ///车辆类型\n int32_t vehicle_type_;\n ///车辆类型名\n std::string vehicle_type_name_;\n\n /// 横向参数\n ///横向控制动态kp值\n double moving_kp_ = 3500;\n /// 横向控制kp计算\n double lat_kp_value_=2.0;\n /// 横向控制kI计算\n double lat_ki_value_=0.0;\n /// 横向控制kd计算\n double lat_kd_value_=0.0;\n /// 纵向控制kp计算\n double lon_kp_value_=2.0;\n /// 纵向控制kI计算\n double lon_ki_value_=0.0;\n /// 纵向控制kd计算\n double lon_kd_value_=0.0;\n\n //动态kp值变化 suggest_kp = steer_angle/kp_slope_ + kp_value_\n double kp_slope_;\n double kp_value_;\n ///预描距离\n double xla_;\n /// 横向->前馈参数\n /// 轮子到方向盘的传动比(默认:11.5,该值即影响前馈,也影响反馈)\n double k_trans_ = 8.2;\n /// 横向->反馈参数\n /// 位置误差比重\n double k_e_err_ = 2.1;\n /// 角度误差比重\n double k_fi_err_ = 1.0;\n /// 总误差比重(反馈权重,默认1,长安1.28)\n double k_ela_ = 1.28;\n /// 方向盘右转最大角度\n double max_steering_angle_ = 431;\n /// 方向盘左转最大角度\n double min_steering_angle_ = -445;\n /// 最高转向速度,300度/秒\n double max_steering_speed_ = 300.0;\n\n /// 纵向参数\n /// 纵向->限速参数\n /// 最高速度10m/s\n double vechile_speed_max_ = 10.0;\n /// 最大油门值\n double 
acc_output_mv_max_ = 40.0;\n /// 最小油门值\n double acc_output_mv_min_ = 0.0;\n /// 纵向->刹车参数\n /// 电子驻车使能,默认为1\n int32_t epb_config_enable_flag_ = 1;\n ///最大刹车值\n double max_brake_value_ = -3;\n ///怠速时最大减速度\n double max_deceleration_in_idle_ = -0.8;\n /// 纵向->车队参数\n /// 编队模式刹车前馈系数,默认1.0\n double platoon_brake_forward_k_ = 1.0;\n\n /// 地图匹配\n /// 地图最少匹配点:30个(3米),少于3米的地图规划,将不响应\n double map_point_lim_min_ = 30;\n /// 地图最多匹配点:50000个(5公里),多于5公里的地图规划,将不响应\n double map_point_lim_max_ = 50000;\n /// 地图原点.纬度\n double origin_lat_ = 31.281675599;\n /// 地图原点.经度\n double origin_lon_ = 121.16317409;\n\n /// 车身参数(常量)\n ///车辆高度,单位m\n double vehicle_height_ = 1.727;\n /// 轴距长度,单位m\n double vehicle_length_ = 2.64;\n ///车辆宽度\n double vehicle_width_= 1.575;\n ///轮距\n double wheelbase_ = 1.720;\n /// 前轴距长度,单位m\n double vehicle_l_front_ = 1.2;\n /// 后轴距长度,单位m\n double vehicle_l_after_ = 1.64;\n /// 车身质量,单位kg\n double vehicle_weight_ = 1577.0;\n /// 前轮侧偏刚度\n double vehicle_cf_ = 190000.0;\n /// 后轮侧偏刚度\n double vehicle_cr_ = 210000.0;\n ///车轮半径 单位mi\n double vehicle_wheel_radius_ = 0.335;\n\n ///位置误差门限值\n double max_position_error_ = 2;\n\n ///自动驾驶模式\n ///工作模式调试 可以单独进行横向或者纵向控制\n int32_t debug_driving_mode_;\n ///转向工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t debug_steering_driving_mode_;\n ///纵向控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t debug_acc_driving_mode_;\n ///刹车控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t debug_brake_driving_mode_;\n ///EPB控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t debug_epb_driving_mode_;\n ///控制器选择\n int32_t controller_switch_;\n ///LQR Q加权矩阵\n std::vector<double> lqr_matrix_q_;\n ///LQR离散时长\n double lqr_ts_=0.01;\n ///LQR预测窗口大小\n double lqr_preview_window_=0;\n ///LQR计算阀值\n double lqr_eps_=0.01;\n ///LQR滤波器窗口大小\n double lqr_mean_filter_window_size_=10;\n ///LQR最大迭代次数\n double lqr_max_iteration_=150;\n ///LQR横向最大加速度\n double lqr_max_lateral_acceleration_=5.0;\n ///最小速度保护\n double lqr_minimum_speed_protection_=0.1;\n ///\n int32_t lqr_cutoff_freq_;\n ///横向误差调节器 避免误差过大的时候有较大调节\n std::vector<Scheduler> 
lqr_lat_err_scheduler_init_;\n ///航向角误差调节器 避免误差过大的时候有较大调节\n std::vector<Scheduler> lqr_heading_err_scheduler_init_;\n\n private:\n ControlDebug<Control> *control_debug_;\n void run();\n};\n}\n}\n\n#endif // APPS_CONTROL_H_\n"
},
{
"alpha_fraction": 0.6746411323547363,
"alphanum_fraction": 0.6937798857688904,
"avg_line_length": 12.866666793823242,
"blob_id": "d4072bded1b1e56565feccce1cbab2a1ccd8b4e1",
"content_id": "867a62c6a9fee7fb018d6dcdfc07376c2e01bc09",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 209,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 15,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/gear/cs55/cs55_gear_control.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n#include \"cs55_gear_control.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\nGearLevel CS55GearControl::GetGearLevel()\n{\n return POSITION_P;\n}\n}\n}\n"
},
{
"alpha_fraction": 0.563065767288208,
"alphanum_fraction": 0.5654797554016113,
"avg_line_length": 20.519479751586914,
"blob_id": "7ef0fbcf132a271ead246515980f415a63ff08a9",
"content_id": "3f1e85d73921d72f220c26d8679d72c3c3942bed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1657,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 77,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarker.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include \"../utils/config.h\"\n#include \"LaneMarkerLine.h\"\n#include \"LaneDetectorTools.h\"\n\nclass LaneMarker\n{\nprotected:\n LaneMarkerLine\t*_pLM[UD_NUM];\n int _iAverageIntensity;\n\npublic:\n inline LaneMarker(void)\n {\n _pLM[UD_UP] = NULL;\n _pLM[UD_DOWN] = NULL;\n _iAverageIntensity = -1;\n }\n\n inline LaneMarker(LaneMarker *p)\n {\n for(int iUD = 0; iUD < UD_NUM; iUD++)\n {\n _pLM[iUD] = new LaneMarkerLine(p->getLaneMarkerLine(iUD));\n }\n _iAverageIntensity = p->AverageIntensity();\n }\n\n inline LaneMarker(LaneMarkerLine *pLMLUp, LaneMarkerLine *pLMLDown)\n {\n _pLM[UD_UP] = new LaneMarkerLine(pLMLUp);\n _pLM[UD_DOWN] = new LaneMarkerLine(pLMLDown);\n _iAverageIntensity = -1;\n }\n\n inline ~LaneMarker(void)\n {\n for(int iUD = 0; iUD < UD_NUM; iUD++)\n {\n SAFE_DELETE(_pLM[iUD]);\n }\n }\n inline LaneMarkerLine\t*Up(void)\n {\n return _pLM[UD_UP];\n }\n inline LaneMarkerLine\t*Down(void)\n {\n return _pLM[UD_DOWN];\n }\n inline void\tUp(LaneMarkerLine *p)\n {\n _pLM[UD_UP] = p;\n }\n inline void\tDown(LaneMarkerLine *p)\n {\n _pLM[UD_DOWN] = p;\n }\n inline LaneMarkerLine *getLaneMarkerLine(int iUD)\n {\n return _pLM[iUD] ;\n }\n inline void setLaneMarkerLine(int iUD, LaneMarkerLine *pLML)\n {\n _pLM[iUD] = pLML;\n }\n inline int AverageIntensity(void)\n {\n return \t_iAverageIntensity;\n }\n inline void AverageIntensity(int iV)\n {\n _iAverageIntensity = iV;\n }\n};\n\ntypedef LaneMarker * ptrLaneMarker;\n"
},
{
"alpha_fraction": 0.5194690227508545,
"alphanum_fraction": 0.527876079082489,
"avg_line_length": 21.376237869262695,
"blob_id": "382e1068ac39f0ad61720d1843ebcbddf9dfbca3",
"content_id": "ec7b25a891a8a9f6834f98f760b584ea5417dae6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2648,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 101,
"path": "/athena/core/arm/Common/include/base/config/nad_config.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_config.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:配置的代码\n-------------------------------------------------------*/\n#ifndef _NAD_CONFIG_H\n#define _NAD_CONFIG_H\n\n\n//引用base头文件\n#include \"nad_base.h\"\n\n//一个网元的配置\nclass nad_ne_config\n{\npublic:\n string name; //网元名称\n string ip; //网元间通讯ip,用于zmq\n int port; //网元间通讯端口,用于zmq\n string lcm_url; //网元内通信的组播地址,用于lcm\n\n //从xml文件中加载\n void load_from_xml(pugi::xml_node &node);\n\n //重载赋值\n nad_ne_config & operator = (const nad_ne_config &ne)\n {\n name = ne.name;\n ip = ne.ip;\n port = ne.port;\n lcm_url = ne.lcm_url;\n return *this;\n }\n\n //重载打印输出\n friend inline ostream & operator << (ostream & os, nad_ne_config &ne)\n {\n cout << \"name=\" << ne.name << \", ip=\" << ne.ip << \", port=\" << ne.port << \", lcm_url=\" << ne.lcm_url << endl;\n return os;\n }\n};\n\n//网元配置数组\nclass nad_ne_config_list\n{\npublic:\n //网元配置数组\n map<string, nad_ne_config> ne_map;\n\npublic:\n //查找网元\n nad_ne_config *find(string name);\n\n //从xml文件中加载\n void load_from_xml(pugi::xml_node &node);\n\n //重载打印输出\n friend inline ostream & operator << (ostream & os, nad_ne_config_list &ne)\n {\n int i = 1;\n map<string, nad_ne_config>::iterator iter;\n for(iter = ne.ne_map.begin(); iter != ne.ne_map.end(); iter++)\n {\n cout << \" \" << i++ << \": \" << iter->second;\n }\n return os;\n }\n};\n\n//CONFIG(控制块)的基类\nclass nad_config\n{\npublic:\n //网元配置\n nad_ne_config local; //自身信息\n nad_ne_config csu; //CSU信息\n nad_ne_config sim; //SIM信息\n nad_ne_config rsu1; //第一个RSU的信息,应对目前只有一个RSU的场景\n nad_ne_config oct1; //第一个OCT的信息\n nad_ne_config_list rsu_list; //RSU列表\n nad_ne_config_list obu_list; //OBU列表\n nad_ne_config_list rsd_list; //RSD列表\n nad_ne_config_list oct_list; //OCT列表\n\n string ne_local_channel; //网元间消息的本地信道名\n\npublic:\n //加载网元名,成功返回true\n bool load_lcoal(pugi::xml_document &doc,\n const char *cfg_name, nad_ne_config *cfg, nad_ne_config_list *cfg_list);\n\n 
//从文件中加载配置\n int load_from_file(string filename);\n\n //调试一下\n void show();\n};\n\n\n#endif\n"
},
{
"alpha_fraction": 0.6051282286643982,
"alphanum_fraction": 0.6256410479545593,
"avg_line_length": 15.714285850524902,
"blob_id": "bf6a30a8b74596c961179fbfbef09f2094b001d4",
"content_id": "4476963c8cbf54a711667c4020c44c31198aefb4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 591,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 35,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/gear/gear_control.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file gear_control.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROL_LOGIC_GEAR_GEAR_CONTROL_H_\n#define CONTROL_LOGIC_GEAR_GEAR_CONTROL_H_\n\n#include \"../../common/enum.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class Control\n * @brief 控制类.\n */\nclass GearControl{\n public:\n GearControl() = default;\n ~GearControl() = default;\n\n virtual GearLevel GetGearLevel() = 0;\n};\n}\n}\n#endif //CONTROL_LOGIC_GEAR_GEAR_CONTROL_H_\n"
},
{
"alpha_fraction": 0.5815533995628357,
"alphanum_fraction": 0.6058252453804016,
"avg_line_length": 17.727272033691406,
"blob_id": "ddb46b15e240b037b88a0921a82e054a1b9a0fc6",
"content_id": "08e2b6469defcb363c6d8b3c27a034b6acaa743a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1118,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 55,
"path": "/athena/examples/LCM/Singlecar/control/common/emergency.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file emergency.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef COMMON_EMERGENCY_H_\n#define COMMON_EMERGENCY_H_\n\n#include \"chassis.h\"\n#include \"trajectory.h\"\n#include \"localization.h\"\n#include \"controller_output.h\"\n#include \"controller.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n/**\n * @class ControlCmd\n * @brief 控制命令.\n */\nclass Emergency\n{\npublic:\n Emergency() = default;\n ~Emergency() =default;\n typedef enum\n {\n NORMAL = 0,\n EMERGENCY_BRAKING = 1,\n }EmergencyMode;\n\n typedef enum\n {\n OK = 0,\n EMERGENCY_ONLY_IN_AUTO_DRIVING= 1,\n ALL_SITUATION = 2,\n }EmergencyLevel;\n /// 紧急模式 0 非紧急状态 1 急\n int32_t emergency_mode_;\n ///紧急刹车等级,0 无紧急模式 1 仅在自动驾驶时有 2 所有状态下都有\n int32_t emergency_level_;\n ///刹车值\n double emergency_value_;\n};\n}//namespace control\n}//namespace athena\n#endif //COMMON_EMERGENCY_H_\n"
},
{
"alpha_fraction": 0.5452922582626343,
"alphanum_fraction": 0.5537706613540649,
"avg_line_length": 18.30172348022461,
"blob_id": "39e0a4ae073715583fbbd5980ee9bc60c5449d04",
"content_id": "8bf172cf1d819117e690910659ed9551b59bcf2c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2505,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 116,
"path": "/athena/core/arm/Control/include/pid/pid_controller.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file controller.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#include <iostream>\n#include <time.h>\n\nusing namespace std;\n\n/**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class PIDController\n *\n * @brief PID controller, 计算PID输出值.\n */\nclass PIDController\n{\nprivate:\n ///比例常数 Proportional\n double p_value;\n ///积分常数 Integral\n double i_value;\n ///微分常数 Derivative\n double d_value;\n ///上一次误差值\n double last_error;\n ///p比例计算值\n double p_factor_value;\n ///i比例计算值\n double i_factor_value;\n ///d比例计算值\n double d_factor_value;\n ///计时起始时间\n struct timespec tpstart;\n ///计时截止时间\n struct timespec tpend;\n\n /**\n * @brief get_timer_tick.\n * @return 时间差(毫秒).\n */\n double get_timer_tick();\n\npublic:\n PIDController()\n {\n last_error = 0; //Error[-1]\n p_value = 0; //比例常数 Proportional Const\n i_value = 0; //积分常数 Integral Const\n d_value = 0; //微分常数 Derivative Const\n\n\n p_factor_value = 0;//p比例计算值\n i_factor_value = 0;//i比例计算值\n d_factor_value = 0;//d比例计算值\n }\n ~PIDController(){};\n\n /**\n * @brief get_pid_value 获取PID调节P I D值.\n * @param[in] p p值.\n * @param[in] i i值.\n * @param[in] d d值.\n * @return void.\n */\n void get_pid_value(double &p,double &i,double &d);\n\n /**\n * @brief get_pid_p_factor_value 获取P值计算值.\n * @return P值计算值.\n */\n double get_pid_p_factor_value(void);\n\n /**\n * @brief get_pid_i_factor_value 获取I值计算值.\n * @return I值计算值.\n */\n double get_pid_i_factor_value(void);\n\n /**\n * @brief get_pid_d_factor_value 获取D值计算值.\n * @return D值计算值.\n */\n double get_pid_d_factor_value(void);\n //add end\n\n /**\n * @brief ComputeControlOutput.\n * @param[in] kp p值.\n * @param[in] ki i值.\n * @param[in] kd d值.\n * @return void.\n */\n void set_pid(double kp,double ki,double kd);\n\n /**\n * @brief pid_calculate.\n * @param[in] tar_value 目标值.\n * @param[in] current_value 当前值.\n * @return PID输出值.\n 
*/\n double pid_calculate(double tar_value,double current_value);\n};\n}\n}\n\n\n"
},
{
"alpha_fraction": 0.6264482736587524,
"alphanum_fraction": 0.6340391635894775,
"avg_line_length": 22.61320686340332,
"blob_id": "44c1e0b1726348a4afd17cea78df1937b63bf1f5",
"content_id": "7daf8af32a96fae7d09c4ef369c56396ccd341bc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2504,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 106,
"path": "/athena/core/arm/Map/include/lanelet_point.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*\n * © 2014 by Philipp Bender <[email protected]>\n *\n * This file is part of libLanelet.\n *\n * libLanelet is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * libLanelet is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with libLanelet. If not, see <http://www.gnu.org/licenses/>.\n */\n\n#pragma once\n\n#include <boost/math/special_functions.hpp>\n#include <cmath>\n#include \"LocalGeographicCS.hpp\"\n#include \"normalize_angle.hpp\"\n\nnamespace LLet\n{\n\nenum LATLON_COORDS\n{\n LAT = 0,\n LON = 1,\n ID = 2\n};\n\nenum XY_COORDS\n{\n X = 0,\n Y = 1\n};\n\ntypedef std::tuple< double, double, int64_t > point_with_id_t;\n\ninline\nstd::pair< double, double > vec( const point_with_id_t& a, const point_with_id_t& b )\n{\n using std::get;\n\n LocalGeographicCS cs(get<LAT>(a), get<LON>(a));\n\n double ax, ay, bx, by;\n std::tie(ax, ay) = cs.ll2xy(get<LAT>(a), get<LON>(a));\n std::tie(bx, by) = cs.ll2xy(get<LAT>(b), get<LON>(b));\n\n double dx = bx - ax;\n double dy = by - ay;\n\n return std::make_pair(dx, dy);\n}\n\ninline\ndouble abs( const std::pair< double, double > v )\n{\n using std::get;\n using boost::math::hypot;\n return hypot( get<X>(v), get<Y>(v) );\n}\n\ninline\ndouble dist( const point_with_id_t& a, const point_with_id_t& b )\n{\n return abs(vec(a, b));\n}\n\ninline\ndouble scalar_product( const std::pair< double, double >& a, const std::pair< double, double >& b )\n{\n using std::get;\n return get<X>(a) * get<X>(b) + get<Y>(a) * get<Y>(b);\n}\n\ninline\ndouble angle( const std::pair< double, 
double >& a, const std::pair< double, double >& b )\n{\n using std::get;\n\n double sp = scalar_product(a, b);\n double cos_phi = sp / (abs(a) * abs(b));\n\n // sign for angle: test cross product\n double crossp_z = get<X>(a) * get<Y>(b) - get<Y>(a) * get<X>(b);\n double signum = boost::math::sign(crossp_z);\n double phi = normalize_angle(signum * std::acos(cos_phi));\n return phi;\n}\n\n\ntemplate< typename T1, typename T2 >\ninline\nbool inrange(const T1& val, const T2& lo, const T2& hi)\n{\n return val >= lo && val <= hi;\n}\n\n}\n"
},
{
"alpha_fraction": 0.5334339141845703,
"alphanum_fraction": 0.5652561783790588,
"avg_line_length": 23.423786163330078,
"blob_id": "e8ce2d098ec0f4b7e314e72c7fbcbdaf4fcdc9db",
"content_id": "f16ca0909d6f25b28d6f082d53e9605f0cd8450c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 14619,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 597,
"path": "/athena/examples/LCM/Singlecar/obu/src/obu/obu_planning/draw_obu_planning.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <math.h>\n\n#include <GL/glu.h>\n#include <GL/glut.h>\n\n\n#include \"draw_obu_planning.h\"\n#include \"obu_planning.h\"\n\n\nint x_lbefore,y_lbefore;\nint x_rbefore,y_rbefore;\nint z_before1,z_before2;\n\nbool buttonSaveLeft, buttonSaveMiddle, buttonSaveRight;\nfloat x_move,y_move,z_move;\nfloat x_move_save,y_move_save, z_move_save;\nfloat x_rotate,y_rotate,z_rotate;\nfloat x_rotate_save,y_rotate_save,z_rotate_save;\nfloat m_zoom;\n\nfloat m_aspect;\n\nfloat m_eyex, m_eyey, m_eyez;\nfloat m_centerx, m_centery, m_centerz;\nfloat m_upx, m_upy, m_upz;\n\n///////////////////OPEN GL control ///////////////////////////////////////\nint g_frame;\nbool g_pause;\n\nvoid OpenGL_Draw()\n{\n x_move = 0,y_move = 0,z_move = 0;\n x_rotate =1,y_rotate=1,z_rotate=1;\n m_zoom=1;\n g_frame=0;\n}\n\nvoid Reshape(int w, int h)\n{\n glViewport(0, 0, (GLint)w, (GLint)h);\n\n m_aspect = (GLfloat) w / (GLfloat) h;\n\n glMatrixMode(GL_PROJECTION);\n glLoadIdentity();\n\n gluPerspective(45.0f,\n m_aspect,\n 0.0f,\n 400.0f);\n\n glMatrixMode(GL_MODELVIEW);\n glLoadIdentity();\n\n}\n\nvoid MouseMove(int x, int y)\n{\n int mod = glutGetModifiers();\n switch(mod)\n {\n case GLUT_ACTIVE_CTRL :\n x_rotate += (y - z_move_save)/100;\n if (x_rotate > 360)\n x_rotate=x_rotate - 360;\n if (x_rotate < -360)\n x_rotate=x_rotate + 360;\n return;\n\n case GLUT_ACTIVE_SHIFT :\n y_rotate += (y - z_move_save)/100;\n if (y_rotate > 360)\n y_rotate=y_rotate - 360;\n if (y_rotate < -360)\n y_rotate=y_rotate + 360;\n return;\n\n case GLUT_ACTIVE_ALT :\n float temp = (x - x_move_save)/100;\n z_rotate += atanf(temp);\n return;\n }\n\n if(buttonSaveLeft)\n {\n x_move += (x - x_move_save)/100;\n z_move += (y - z_move_save)/100;\n }\n\n if(buttonSaveMiddle)\n {\n float multiplay = (y - z_move_save)/10000;\n m_zoom =m_zoom*(1+multiplay);\n }\n\n if(buttonSaveRight)\n {\n float multiplay = (y - z_move_save)/10000;\n m_zoom =m_zoom*(1+multiplay);\n }\n}\n\nvoid PassiveMouseMove(int x, int 
y)\n{\n\n}\n\nvoid MouseRotate(int x, int y, int z)\n{\n}\n\nvoid MouseKey(int button, int state, int x, int y)\n{\n x_move_save=x;\n // y_move_save;\n z_move_save=y;\n\n switch (button)\n {\n case GLUT_LEFT_BUTTON:\n if(state == GLUT_DOWN)\n buttonSaveLeft=true;\n else\n buttonSaveLeft=false;\n break;\n\n case GLUT_MIDDLE_BUTTON:\n if(state == GLUT_DOWN)\n buttonSaveMiddle=true;\n else\n buttonSaveMiddle=false;\n break;\n\n case GLUT_RIGHT_BUTTON:\n if(state == GLUT_DOWN)\n buttonSaveRight=true;\n else\n buttonSaveRight=false;\n break;\n }\n}\n\nvoid Key(unsigned char key, int x, int y)\n{\n switch (key)\n {\n case KEY_ESC:\n // exit(0);\n break;\n }\n}\n\nvoid SpecialKey(int key, int x, int y)\n{\n int mod = 0;\n switch (key)\n {\n case GLUT_KEY_UP:\n mod = glutGetModifiers();\n if (mod == GLUT_ACTIVE_ALT)\n {\n }\n else if (mod == GLUT_ACTIVE_SHIFT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_CTRL)\n {\n\n }\n else\n y_move ++;\n\n break;\n\n case GLUT_KEY_DOWN:\n mod = glutGetModifiers();\n if (mod == GLUT_ACTIVE_ALT)\n {\n }\n else if (mod == GLUT_ACTIVE_SHIFT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_CTRL)\n {\n\n }\n else\n y_move --;\n break;\n\n case GLUT_KEY_LEFT:\n mod = glutGetModifiers();\n if (mod == GLUT_ACTIVE_ALT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_SHIFT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_CTRL)\n {\n }\n else\n x_move --;\n break;\n\n case GLUT_KEY_RIGHT:\n mod = glutGetModifiers();\n if (mod == GLUT_ACTIVE_ALT)\n {\n }\n else if (mod == GLUT_ACTIVE_SHIFT)\n {\n }\n else if (mod == GLUT_ACTIVE_CTRL)\n {\n }\n else\n x_move ++;\n break;\n\n case GLUT_KEY_PAGE_UP:\n m_zoom= 1.1*m_zoom;\n break;\n\n case GLUT_KEY_PAGE_DOWN\t:\n m_zoom =m_zoom/1.1;\n break;\n\n case GLUT_KEY_HOME:\n m_zoom=1.5*m_zoom;\n break;\n\n case GLUT_KEY_END:\n m_zoom=m_zoom/1.5;\n break;\n\n case GLUT_KEY_F1:\n x_rotate += 3;\n if (x_rotate > 360)\n x_rotate=x_rotate - 360;\n if (x_rotate < -360)\n x_rotate=x_rotate + 360;\n break;\n\n case GLUT_KEY_F2:\n x_rotate += 
-3;\n if (x_rotate > 360)\n x_rotate=x_rotate - 360;\n if (x_rotate < -360)\n x_rotate=x_rotate + 360;\n break;\n\n case GLUT_KEY_F3:\n y_rotate += 3;\n if (y_rotate > 360)\n y_rotate=y_rotate - 360;\n if (y_rotate < -360)\n y_rotate=y_rotate + 360;\n break;\n\n case GLUT_KEY_F4:\n y_rotate += -3;\n if (y_rotate > 360)\n y_rotate=y_rotate - 360;\n if (y_rotate < -360)\n y_rotate=y_rotate + 360;\n break;\n\n case GLUT_KEY_F5:\n z_rotate += atanf(3);\n break;\n\n case GLUT_KEY_F6:\n z_rotate += atanf(-3);\n break;\n\n case GLUT_KEY_F9:\n break;\n\n case GLUT_KEY_F10:\n break;\n\n case GLUT_KEY_F11:\n break;\n\n case GLUT_KEY_F12:\n break;\n }\n\n glutPostRedisplay();\n}\n\n/*\n g_rsu_planning->origin_x = 27749.4;\n g_rsu_planning->origin_y = -10034.9;\n g_rsu_planning->origin_z = 0;\n*/\n\nvoid Draw_Org()\n{\n double x = 27749.4;\n double y = -10034.9;\n\n glPointSize(8);\n glColor3d(1, 1, 1);\n glBegin(GL_POINTS);\n glVertex2f(x, y);\n glEnd();\n //\n glColor3d(1,1,1);\n glBegin(GL_LINES);\n glVertex2f(x - 2, y);\n glVertex2f(x + 2, y);\n glEnd();\n\n glBegin(GL_LINES);\n glVertex2f(x, y-3);\n glVertex2f(x, y+3);\n glEnd();\n}\n\nvoid draw_section_line(route::RouteBase route)\n{\n if (route.route_motion_info_.num_of_section == 0 )\n {\n return;\n }\n\n for(int i = 0; i < route.route_motion_info_.num_of_section; i++)\n {\n for(int j = 0; j < route.route_motion_info_.line[i].num_of_lane-1; j++)\n {\n\n //画点\n /*\n glPointSize(2);\n glColor3d(0, 3, 3);\n glBegin(GL_POINTS);\n glVertex2f(route.route_motion_info_.line[i].lane[j].x, route.route_motion_info_.line[i].lane[j].y);\n glEnd();\n */\n\n //画线\n /*\n glLineWidth(1);\n glColor3d(3, 3, 3);\n glBegin(GL_LINE_STRIP);\n glVertex2f(route.route_motion_info_.line[i].lane[j].x, route.route_motion_info_.line[i].lane[j].y);\n glVertex2f(route.route_motion_info_.line[i].lane[j+1].x, route.route_motion_info_.line[i].lane[j+1].y);\n glEnd();*/\n }\n }\n glutPostRedisplay(); //武汉经济技术开发区出口加工区A栋\n}\n\n\n\nvoid 
draw_obu_planning()\n{\n\n Draw_Org();\n\n //draw lanelet and centerline\n if(g_obu_planning->obu.route_draw_flag == true)\n {\n if(g_obu_planning->obu.route_draw[1] == NULL) return;\n if(g_obu_planning->obu.route_draw[1]->map_->lane_list_.size() > 0)\n {\n //g_obu_planning->obu.route_draw[1].draw();\n g_obu_planning->obu.route_draw[1]->draw_part_line();\n double x,y;\n g_obu_planning->obu.route_draw[1]->transfer_.cs.ll2xy(g_obu_planning->obu.cur_lat, g_obu_planning->obu.cur_lon, x, y);\n DrawCar(x, y, g_obu_planning->obu.cur_yaw, 0) ;\n }\n }\n else\n {\n if(g_obu_planning->obu.route_draw[0]->map_->lane_list_.size() > 0)\n {\n //g_obu_planning->obu.route_draw[0].draw();\n g_obu_planning->obu.route_draw[0]->draw_part_line();\n double x,y;\n g_obu_planning->obu.route_draw[0]->transfer_.cs.ll2xy(g_obu_planning->obu.cur_lat, g_obu_planning->obu.cur_lon, x, y);\n DrawCar(x, y, g_obu_planning->obu.cur_yaw, 0) ;\n }\n }\n\n glutPostRedisplay();\n}\n\nvoid myDisplay(void)\n{\n glClearColor(0.0, 0.0, 0.0, 0.0);\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n glClearColor(0.0, 0.0, 0.0, 0.0);\n\n glLoadIdentity();\n\n // x_rotate = -102, y_rotate = -3 , z_rotate = 57.4561 ;\n // x_move = 156.06, y_move = -166, z_move =0, m_zoom = 2.14359;\n double x,y;\n g_obu_planning->obu.route->transfer_.cs.ll2xy(g_obu_planning->obu.cur_lat, g_obu_planning->obu.cur_lon, x, y);\n\n m_eyex = x;\n m_eyey = y;\n m_eyez = 300;\n\n m_centerx = x ;\n m_centery = y;\n m_centerz = 0;\n\n m_upx = x;\n m_upy = y +10;\n m_upz = 0;\n\n gluLookAt(m_eyex, \t m_eyey, \t m_eyez,\n m_centerx,\tm_centery,\t m_centerz,\n m_upx,\t \tm_upy,\t\t m_upz);\n\n glScalef(1, 1, 1);\n\n glRotatef(x_rotate,1,0,0);\n glRotatef(y_rotate,0,1,0);\n glRotatef(z_rotate,0,0,1);\n\n glTranslatef(x_move, y_move, z_move);\n glScalef(m_zoom, m_zoom, m_zoom);\n\n draw_obu_planning();\n\n glFlush();\n glutSwapBuffers();\n}\n\n\nvoid MyGLDispIni()\n{\n GLenum type;\n\n m_eyex=0, m_eyey=0, m_eyez= 80;\n m_centerx=0, 
m_centery=0, m_centerz=0;\n m_upx=0, m_upy=1, m_upz=0;\n\n buttonSaveLeft=false;\n buttonSaveMiddle=false;\n buttonSaveRight=false;\n\n x_move=0.0;\n y_move=0.0;\n z_move=0.0;\n x_rotate=0.0;\n y_rotate=0.0;\n z_rotate=0.0;\n m_zoom=1;\n\n x_lbefore=0, y_lbefore=0;\n x_rbefore=0, y_rbefore=0;\n z_before1=0, z_before2=0;\n\n type = GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE;\n glutInitDisplayMode(type);\n\n glutInitWindowSize(600, 800);\n glutCreateWindow(\"obu_planning\");\n\n glutReshapeFunc( Reshape );\n glutKeyboardFunc(Key);\n glutSpecialFunc(SpecialKey);\n glutMouseFunc(MouseKey);\n glutMotionFunc(MouseMove);\n glutPassiveMotionFunc(PassiveMouseMove);\n glutSpaceballRotateFunc(MouseRotate);\n\n glutDisplayFunc( &myDisplay);\n}\n\nvoid DrawCar(double x, double y, double yaw, double steer_angle)\n{\n double front_track= 1.2;\n double front_wheel_wide = 1;\n double back_wheel_wide = 1;\n\n glPointSize(8);\n glColor3d(1,0,0);\n glBegin(GL_POINTS);\n glVertex2f( x + (1.2) * sin( yaw /180 *PI ),\n y + (1.2) * cos( yaw /180 *PI ) );\n glEnd();\n\n double frontwtht = to_radians(yaw);\n // double backwtht = to_radians(yaw + 90);\n double wheel_direction = to_radians(yaw + steer_angle /15.0 + 90); //\n\n double frontWhellCenterX = x + (1.2) * sin(frontwtht);\n double frontWhellCenterY = y + (1.2) * cos(frontwtht);\n double backWhellCenterX = x - (1.5) * sin(frontwtht);\n double backWhellCenterY = y - (1.5) * cos(frontwtht);\n\n glLineWidth(2);\n glColor3d(0,1,0);\n glBegin(GL_LINES);\n glVertex2f(x, y);\n glVertex2f(frontWhellCenterX,frontWhellCenterY );\n glEnd();\n\n glColor3d(1,0,0);\n glBegin(GL_LINES);\n glVertex2f(x, y);\n glVertex2f(backWhellCenterX,backWhellCenterY );\n glEnd();\n\n double frontWhellLeftX = frontWhellCenterX - (( front_track/2.0) * cos( frontwtht));\n double frontWhellLeftY = frontWhellCenterY + (( front_track/2.0) * sin( frontwtht));\n double frontWhellRightX = frontWhellCenterX + (( front_track/2.0) * cos( frontwtht));\n double frontWhellRightY = 
frontWhellCenterY - (( front_track/2.0) * sin( frontwtht));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(frontWhellLeftX, frontWhellLeftY);\n glVertex2f(frontWhellRightX, frontWhellRightY);\n glEnd();\n\n double frontLeftWhellUpX = frontWhellLeftX - (( front_wheel_wide/2.0) * cos( wheel_direction ));\n double frontLeftWhellUpY = frontWhellLeftY + (( front_wheel_wide/2.0) * sin( wheel_direction ));\n double frontLeftWhellDownX = frontWhellLeftX + (( front_wheel_wide/2.0) * cos( wheel_direction ));\n double frontLeftWhellDownY = frontWhellLeftY - (( front_wheel_wide/2.0) * sin( wheel_direction ));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(frontLeftWhellUpX, frontLeftWhellUpY);\n glVertex2f(frontLeftWhellDownX, frontLeftWhellDownY);\n glEnd();\n\n double frontrightWhellUpX = frontWhellRightX - (( front_wheel_wide/2.0) * cos( wheel_direction ));\n double frontrightWhellUpY = frontWhellRightY + (( front_wheel_wide/2.0) * sin( wheel_direction ));\n double frontrightWhellDownX = frontWhellRightX + (( front_wheel_wide/2.0) * cos( wheel_direction ));\n double frontrightWhellDownY = frontWhellRightY - (( front_wheel_wide/2.0) * sin( wheel_direction ));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(frontrightWhellUpX, frontrightWhellUpY);\n glVertex2f(frontrightWhellDownX, frontrightWhellDownY);\n glEnd();\n\n double BackWhellLeftX = backWhellCenterX - (( front_track/2.0) * cos( frontwtht));\n double BackWhellLeftY = backWhellCenterY + (( front_track/2.0) * sin( frontwtht));\n double BackWhellRightX = backWhellCenterX + (( front_track/2.0) * cos( frontwtht));\n double BackWhellRightY = backWhellCenterY - (( front_track/2.0) * sin( frontwtht));\n\n double backwtht = to_radians(yaw + 90);\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(BackWhellRightX, BackWhellRightY);\n glVertex2f(BackWhellLeftX, BackWhellLeftY);\n glEnd();\n\n double BackLeftWhellUpX = 
BackWhellLeftX - ((back_wheel_wide/2.0) * cos(backwtht));\n double BackLeftWhellUpY = BackWhellLeftY + (( back_wheel_wide/2.0) * sin(backwtht));\n double BackLeftWhellDownX = BackWhellLeftX + (( back_wheel_wide/2.0) * cos(backwtht));\n double BackLeftWhellDownY = BackWhellLeftY - (( back_wheel_wide/2.0) * sin(backwtht));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(BackLeftWhellDownX, BackLeftWhellDownY);\n glVertex2f(BackLeftWhellUpX, BackLeftWhellUpY);\n glEnd();\n\n double BackrightWhellUpX = BackWhellRightX - ((back_wheel_wide/2.0) * cos(backwtht));\n double BackrightWhellUpY = BackWhellRightY + ((back_wheel_wide/2.0) * sin(backwtht));\n double BackrightWhellDownX = BackWhellRightX + ((back_wheel_wide/2.0) * cos(backwtht));\n double BackrightWhellDownY = BackWhellRightY - ((back_wheel_wide/2.0) * sin(backwtht));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(BackrightWhellUpX, BackrightWhellUpY);\n glVertex2f(BackrightWhellDownX, BackrightWhellDownY);\n glEnd();\n}\n"
},
{
"alpha_fraction": 0.7124850153923035,
"alphanum_fraction": 0.7190876603126526,
"avg_line_length": 28.228069305419922,
"blob_id": "96217bb38a7cb8bb0cc5dbd894d4abde478bd453",
"content_id": "eb1fcb17dbc060ed85ca89a6272d1a2261054b92",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1667,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 57,
"path": "/athena/core/arm/Map/include/RegulatoryElement.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*\n * © 2014 by Philipp Bender <[email protected]>\n * \n * This file is part of libLanelet.\n *\n * libLanelet is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * libLanelet is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with libLanelet. If not, see <http://www.gnu.org/licenses/>.\n */\n \n#pragma once\n#include <boost/variant.hpp>\n#include <vector>\n#include <string>\n#include <memory>\n\n#include \"LaneletFwd.hpp\"\n#include \"lanelet_point.hpp\"\n#include \"LineStrip.hpp\"\n#include \"Attribute.hpp\"\n\nnamespace LLet\n{\n\ntypedef boost::variant< lanelet_ptr_t, strip_ptr_t, point_with_id_t > member_variant_t;\ntypedef std::pair< std::string, member_variant_t > regulatory_element_member_t;\n\nclass RegulatoryElement;\n\ntypedef std::shared_ptr< RegulatoryElement > regulatory_element_ptr_t;\n\nclass RegulatoryElement : public HasAttributes\n{\npublic:\n RegulatoryElement( int64_t id );\n int64_t id() const;\n const std::vector< regulatory_element_member_t >& members() const;\n std::vector< member_variant_t > members(std::string role) const;\n\n std::vector< regulatory_element_member_t >& members();\n\nprivate:\n std::vector< regulatory_element_member_t > _members;\n const int64_t _id;\n AttributeMap _attributes;\n};\n\n}\n"
},
{
"alpha_fraction": 0.6530343294143677,
"alphanum_fraction": 0.6530343294143677,
"avg_line_length": 20.657142639160156,
"blob_id": "e4f47904a9c8fdb422b3821a4f0ac83ed5ad1b8f",
"content_id": "e8716a2c20c37efbaa2a370ce103de276eff4bf9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 758,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 35,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkers.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include \"../utils/type.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarker.h\"\n\nclass LaneMarkers\n{\nprivate:\n FlexArray<ptrLaneMarker>\t*_faLaneMarkers;\n\npublic:\n LaneMarkers(void);\n ~LaneMarkers(void);\n LaneMarker *getLaneMarker(int idx);\n void deleteLaneMarker(void);\n void deleteLaneMarker(int iIdx);\n int getLaneMarkerNumber(void);\n void addLaneMarker(LaneMarker *pLaneMarker);\n\n inline void remove_delete(int iIdx)\n {\n _faLaneMarkers->remove_delete(iIdx);\n }\n inline void clear_reset(void)\n {\n _faLaneMarkers->clear();\n _faLaneMarkers->reset();\n }\n inline void reset(void)\n {\n _faLaneMarkers->reset();\n }\n void set(int iIdx, LaneMarker *pLaneMarker);\n};\n"
},
{
"alpha_fraction": 0.5742419362068176,
"alphanum_fraction": 0.6092528700828552,
"avg_line_length": 16.87150764465332,
"blob_id": "49795dc375b24d8999d88a46eaddc1ae6decbd93",
"content_id": "bd9396a67bc21c5d7322b8f97ca10b0b461d9a77",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3199,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 179,
"path": "/athena/core/arm/Control/include/common/math_util.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _MATHUTIL_H\n#define _MATHUTIL_H\n\n#include <math.h>\n#include <stdlib.h>\n#include <stdint.h>\n#include <assert.h>\n#include <iostream>\n\n\n#ifndef PI\n#define PI 3.14159265358979323846264338\n#endif\n\n#define to_radians(x) ( (x) * (PI / 180.0 ))\n#define to_degrees(x) ( (x) * (180.0 / M_PI ))\n\n#define TWOPI_INV (0.5/PI)\n#define TWOPI (2*PI)\n\nstatic inline double sq(double v)\n{\n return v*v;\n}\n\nstatic inline double sgn(double v)\n{\n return (v>=0) ? 1 : -1;\n}\n\n// random number between [0, 1)\nstatic inline float randf()\n{\n return ((float) rand()) / (RAND_MAX + 1.0);\n}\n\nstatic inline float signed_randf()\n{\n return randf()*2 - 1;\n}\n\n// return a random integer between [0, bound)\nstatic inline int irand(int bound)\n{\n int v = (int) (randf()*bound);\n\n#ifdef NEED_ASSERT\n assert(v >= 0);\n assert(v < bound);\n#endif\n\n return v;\n}\n\n/** valid only for v > 0 **/\nstatic inline double mod2pi_positive(double vin)\n{\n double q = vin * TWOPI_INV + 0.5;\n int qi = (int) q;\n\n return vin - qi*TWOPI;\n}\n\n/** Map v to [-PI, PI] **/\nstatic inline double mod2pi(double vin)\n{\n if (vin < 0)\n return -mod2pi_positive(-vin);\n else\n return mod2pi_positive(vin);\n}\n\n/** Return vin such that it is within PI degrees of ref **/\nstatic inline double mod2pi_ref(double ref, double vin)\n{\n return ref + mod2pi(vin - ref);\n}\n\nstatic inline int theta_to_int(double theta, int max)\n{\n theta = mod2pi_ref(PI, theta);\n int v = (int) (theta / ( 2 * PI ) * max);\n\n if (v==max)\n v = 0;\n\n#ifdef NEED_ASSERT\n assert (v >= 0 && v < max);\n#endif\n\n return v;\n}\n\nstatic inline int imin(int a, int b)\n{\n return (a < b) ? a : b;\n}\n\nstatic inline int imax(int a, int b)\n{\n return (a > b) ? a : b;\n}\n\nstatic inline int64_t imin64(int64_t a, int64_t b)\n{\n return (a < b) ? a : b;\n}\n\nstatic inline int64_t imax64(int64_t a, int64_t b)\n{\n return (a > b) ? 
a : b;\n}\n\nstatic inline int iclamp(int v, int minv, int maxv)\n{\n return imax(minv, imin(v, maxv));\n}\n\nstatic inline double fclamp(double v, double minv, double maxv)\n{\n return fmax(minv, fmin(v, maxv));\n}\n\nstatic inline double fclamp_360(double v)\n{\n if(v > 360.0)\n v -= 360.0;\n else if(v < 0.0)\n v += 360.0;\n\n return v;\n}\n\nstatic inline double length_two_points(double x, double y, double xx, double yy)\n{\n double l;\n double x_xx = x - xx;\n double y_yy = y - yy;\n\n l = sqrt( x_xx*x_xx + y_yy*y_yy );\n\n return l;\n}\n\nstatic inline double streeing_to_ks(double steering_angle,int32_t k_trans,double vehicle_length)\n{\n double k_s=0;\n\n k_s = tan(\n to_radians(steering_angle / k_trans)\n )/ vehicle_length;\n\n return k_s;\n}\n\nstatic inline double speed_from_ll(double lon_speed, double lat_speed)\n{\n double speed=0;\n speed = sqrt( lon_speed*lon_speed + lat_speed*lat_speed );\n\n return speed;\n}\n\n\nstatic inline double calculate_vertical_direction(double current_head)\n{\n double vertical_direction;\n vertical_direction = current_head + 90;\n\n if(vertical_direction > 360)\n vertical_direction -= 360;\n else if(vertical_direction < 0)\n vertical_direction += 360;\n\n return vertical_direction;\n}\n\n\n#endif\n"
},
{
"alpha_fraction": 0.6289023160934448,
"alphanum_fraction": 0.6296576261520386,
"avg_line_length": 26.020408630371094,
"blob_id": "ea84c96abb2be2ccbfa522f63efe13ddf7685e91",
"content_id": "7ec38433e34ba01fbd7ab21a68deb60fffaa8a6e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4016,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 147,
"path": "/athena/examples/LCM/Singlecar/planning/planning_node.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _PLANNING_NODE_H\n#define _PLANNING_NODE_H\n\n#include <string>\n#include <iostream>\n#include <unistd.h>\n\n#include <lcm/lcm.h>\n#include <lcm/lcm-cpp.hpp>\n\n#include <thread>\n#include <pthread.h>\n#include <sys/sem.h>\n\n\n#include \"planning_lcm_msg.h\"\n#include \"planning/planning.h\"\n#include \"common/enum_list.h\"\n\nusing namespace std;\n\nclass PlanningNode\n{\npublic:\n PlanningNode();\n ~PlanningNode();\n\n void run();\n int park_planning( athena::planning::Planning &planning, double view_step, bool is_col_last_d, bool is_col_last_r );\n\n int publish_trajectory( OutTrajectory ot );\n int publish_view_path( path out_path, double step ); ///step:发送点的间隔,单位:米\n int publish_view_path( OutTrajectory out_traj, double step ); ///step:发送点的间隔,单位:米\n int publish_alarm( int32_t alarm_level, string alarm );\n\n\nprivate:\n /// variables\n string planning_url_;\n\n ///input\n route_planning_m route_;\n CarState car_state_;\n TrafficLights traffic_lights_;\n StopPark stop_park_;\n SensorObstacles sensor_obstacles_;\n\n int hand_direction_;\n int last_hand_direction_;\n\n ///flag\n bool is_route_set_;\n\n ///class\n athena::planning::Planning planning_;\n\n ///thread function\n void* ThreadFunction_ins();\n void* ThreadFunction_can();\n void* ThreadFunction_route();\n void* ThreadFunction_map();\n void* ThreadFunction_obstacle();\n\n\n ///lcm msg\n lcm::LCM* lcm_ins_;\n lcm::LCM* lcm_can_;\n lcm::LCM* lcm_route_;\n lcm::LCM* lcm_map_;\n lcm::LCM* lcm_obstacle_;\n lcm::LCM* lcm_publish_;\n ///callback function\n //ins\n void handle_ins_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::ins_info * msg);\n //can\n void handle_can_value_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::CAN_value* msg);\n //can\n void handle_can_status_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::CAN_status* msg);\n //can\n void 
handle_vehicle_info_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::vehicle_info* msg);\n //can\n void handle_chassis_detail_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::ChassisDetail* msg);\n //can\n// void handle_steering_Message(\n// const lcm::ReceiveBuffer* rbuf,\n// const std::string& chan,\n// const obu_lcm::steering_feedback_info* msg);\n //route\n void handle_route_info(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::om_route_respond* msg);\n //crossing & lights\n void handle_traffic_lights_info(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::om_traffic_lights_report* msg);\n //stop point\n void handle_back_coordinate_XYH(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::back_coordinate_XYH* msg);\n //start\n void handle_start_respond(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::ou_start_auto_respond* msg);\n //stop\n void handle_stop_respond(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::ou_stop_auto_respond* msg);\n //obstacle\n void handle_obstacle_Message(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const nad_lcm::sensor_obstacle_report* msg_obstacle_list);\n\n /// initializer\n void init();\n\n ///\n int get_current_ins_info( const obu_lcm::ins_info * msg );\n int get_map_info( const nad_lcm::om_route_respond* msg );\n int get_obstacles_raw( const nad_lcm::sensor_obstacle_report* msg_obstacle_list );\n\n};\n\nint point2msg( const navi_point& src_point, obu_lcm::nav_points* msg_p );\n\n\n#endif // _PLANNING_NODE_H\n"
},
{
"alpha_fraction": 0.5866050720214844,
"alphanum_fraction": 0.6120092272758484,
"avg_line_length": 15.037036895751953,
"blob_id": "41cc45002f99e42602020dcd3892472a0261341e",
"content_id": "8523a3199070384bd329dc73073d3057c86b7d2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 433,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 27,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/acc/accelerate.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file control_logic.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROL_LOGIC_ACC_ACCELERATE_H_\n#define CONTROL_LOGIC_ACC_ACCELERATE_H_\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n template <class T>\n class Accelerate\n {\n\n };\n}\n}\n\n#endif //CONTROL_LOGIC_ACC_ACCELERATE_H_\n"
},
{
"alpha_fraction": 0.6441552042961121,
"alphanum_fraction": 0.650540292263031,
"avg_line_length": 27.879432678222656,
"blob_id": "50aeaed6f5c412908e92efa233f97a2cef89ead8",
"content_id": "e35587b6e371547b381b847ad0f4c9ef602355ca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4072,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 141,
"path": "/athena/examples/ROS/src/DataRecording/usb_camera/src/main.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <ros/ros.h>\n#include <image_transport/image_transport.h>\n#include <cv_bridge/cv_bridge.h>\n#include <sensor_msgs/image_encodings.h>\n#include <opencv2/imgproc/imgproc.hpp>\n#include <opencv2/highgui/highgui.hpp>\n#include <opencv2/opencv.hpp> \n\n#include <boost/thread.hpp> \n#include <boost/bind.hpp> \n\nstatic const std::string OPENCV_WINDOW = \"Image window\";\n\nclass ImageConverter\n{\n \tros::NodeHandle nh_;\n\tros::NodeHandle nh_private_;\n \timage_transport::ImageTransport it_;\n \timage_transport::Subscriber image_sub_;\n \timage_transport::Publisher image_pub_;\n\n\tint camera_device_;\n\tcv::Mat inImage_; \n\tcv::VideoCapture videoCap_;\n\tcv::VideoWriter writer_;\n\t//std::string videoName_;\n\n\n\tint image_width_,image_height_,video_device_;\n\tstd::string frame_id_;\n\npublic:\n\n\ttypedef boost::function<void()> CaptureImageCb;\n\n \tImageConverter(const ros::NodeHandle& nh, const ros::NodeHandle& nh_private)\n \t\t:it_(nh_)\n\t\t,nh_(nh)\n\t\t,nh_private_(nh_private)\n\t\t,camera_device_(0)\n\t\t//,videoName_(std::string(\"\"))\n \t{\n\t\tnh_private_.getParam(\"image_width\", image_width_);\n\t\tnh_private_.getParam(\"image_height\", image_height_);\n\t\tnh_private_.getParam(\"video_device\", video_device_);\n\t\tnh_private_.getParam(\"frame_id_\", frame_id_);\n\t\t\t\n\t\tROS_INFO(\"width: %d, height: %d, device_id: %d, frame_id: %s\",image_width_,image_height_,video_device_,frame_id_.c_str());\n\n\t\t//Open Camera\n\t\tvideoCap_ = cv::VideoCapture(video_device_); /*open the default camera device*/ \n\t\tif(!videoCap_.isOpened()) /*check open result*/ \n\t\t{\n\t\t\tROS_ERROR(\"Open camera failed!\");\n\t\t \treturn;\n\t\t} \n\t\tvideoCap_.set(CV_CAP_PROP_FRAME_WIDTH, image_width_); \n \t\tvideoCap_.set(CV_CAP_PROP_FRAME_HEIGHT, image_height_); \n\t\tboost::thread captureImageThread(boost::bind(&ImageConverter::startCameraLoop,this));\t\n\n\t \t// Subscrive to input video feed and publish output video feed\n\t \timage_sub_ = 
it_.subscribe(\"/camera/image_raw\", 1,&ImageConverter::imageCb, this);\n\t \timage_pub_ = it_.advertise(\"/usb_cam/image_raw\", 1);\n\n\t\t//cv::namedWindow(OPENCV_WINDOW);\n\n\t\tint imageWidth = static_cast<int>(videoCap_.get(CV_CAP_PROP_FRAME_WIDTH)); \n\t\tint imageHeight = static_cast<int>(videoCap_.get(CV_CAP_PROP_FRAME_HEIGHT)); \n\n\t\tROS_INFO(\"image width: %d, height: %d\",imageWidth,imageHeight);\n\t\t//writer_.open(\"/home/nvidia/cameraVideo.avi\",CV_FOURCC('M','J','P','G'),videoCap_.get(CV_CAP_PROP_FPS),cv::Size(imageWidth,imageHeight),true);\n \t}\n\n \t~ImageConverter()\n \t{\n \t\t//cv::destroyWindow(OPENCV_WINDOW);\n\t\tvideoCap_.release();\n\t\t//writer_.release();\n \t}\n\n \tvoid imageCb(const sensor_msgs::ImageConstPtr& msg)\n \t{\n\t \tcv_bridge::CvImagePtr cv_ptr;\n\t \ttry\n\t \t{\n\t \t\tcv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);\n\t \t}\n\t \tcatch (cv_bridge::Exception& e)\n\t \t{\n\t \t\tROS_ERROR(\"cv_bridge exception: %s\", e.what());\n\t \t\treturn;\n\t \t}\n\n \t\t// Draw an example circle on the video stream\n \t\tif (cv_ptr->image.rows > 60 && cv_ptr->image.cols > 60)\n \t\t\tcv::circle(cv_ptr->image, cv::Point(50, 50), 10, CV_RGB(255,0,0));\n\n \t\t// Update GUI Window\n \t\tcv::imshow(OPENCV_WINDOW, cv_ptr->image);\n \t\tcv::waitKey(3);\n\n \t\t// Output modified video stream\n \t\timage_pub_.publish(cv_ptr->toImageMsg());\n \t}\n\n\tvoid startCameraLoop()\n\t{\n\t\tcv_bridge::CvImagePtr cv_ptr;\n\t\twhile(ros::ok())\n\t\t{\n\t\t\tvideoCap_>>inImage_;\n\t\t\tif(NULL == inImage_.data)\n\t\t\t\tcontinue;\n\n\t\t\t//save frame to video\n\t\t\twriter_.write(inImage_);\n\n\t\t\t//tranform cv::Mat to sensor_msgs/Image through cv_bridge\n\t\t\tstd_msgs::Header header;\n\t\t\theader.stamp = ros::Time::now();\n\t\t\theader.frame_id = frame_id_;\n\t\t\tstd::string encoding(\"bgr8\");\n\t\t\tcv_ptr = cv_bridge::CvImagePtr(new 
cv_bridge::CvImage(header,encoding,inImage_));\n\n\t\t\timage_pub_.publish(cv_ptr->toImageMsg());\n\t\t}\n\t}\n};\n\nint main(int argc, char** argv)\n{\n \tros::init(argc, argv, \"image_converter\");\n\tros::NodeHandle nh;\n\tros::NodeHandle nh_private(\"~\");\n\n \tImageConverter ic(nh,nh_private);\n \tros::AsyncSpinner spinner(2);\n\tspinner.start();\n \tros::waitForShutdown();\n \treturn 0;\n}\n"
},
{
"alpha_fraction": 0.4917127192020416,
"alphanum_fraction": 0.5230202674865723,
"avg_line_length": 13.675675392150879,
"blob_id": "3def474c3ce93e93d3fc1214b7df8b1c8b549ecb",
"content_id": "8a4ef8299947da10cadabcd5f202b40f344b71a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 571,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 37,
"path": "/athena/core/arm/Control/include/gear_position.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file gear_position.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef GEAR_POSITION_H_\n#define GEAR_POSITION_H_\n\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n\n/**\n * @class ControllerOutputAlarm\n *\n * @brief 控制警报输出.\n */\nenum GearPosition:int32_t\n{\n AT_P = 0, /**< P档位*/\n AT_R = 1, /**< R档位*/\n AT_N = 2, /**< N档位*/\n AT_D = 3, /**< D档位*/\n};\n}\n}\n\n#endif // GEAR_POSITION_H_\n"
},
{
"alpha_fraction": 0.6234738230705261,
"alphanum_fraction": 0.6268215775489807,
"avg_line_length": 26.010639190673828,
"blob_id": "efc169f62483463b8c8626ee15e94f0884d76838",
"content_id": "4353a05d5048bea57ef28a859dc5667192f31171",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5079,
"license_type": "no_license",
"max_line_length": 149,
"num_lines": 188,
"path": "/athena/core/arm/Map/include/LaneletGraph.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*\n * © 2014 by Philipp Bender <[email protected]>\n *\n * This file is part of libLanelet.\n *\n * libLanelet is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * libLanelet is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with libLanelet. If not, see <http://www.gnu.org/licenses/>.\n */\n\n#pragma once\n\n#include <boost/graph/adjacency_list.hpp>\n#include <boost/graph/filtered_graph.hpp>\n#include <boost/graph/astar_search.hpp>\n#include <boost/graph/dijkstra_shortest_paths.hpp>\n#include <boost/optional.hpp>\n\n#include <boost/foreach.hpp>\n\n#include \"Lanelet.hpp\"\n#include \"LocalGeographicCS.hpp\"\n#include <vector>\n#include <vector>\n#include <deque>\n#include <queue>\n#include <map>\n\n#include \"RTree.h\"\n\nnamespace LLet\n{\n\nstruct EdgeInfo\n{\n int64_t src;\n int64_t dest;\n double routing_cost;\n double turn_right;\n double turn_left;\n double turn;\n double block;\n EdgeInfo()\n {\n src=0;\n dest=0;\n routing_cost=turn=turn_right=turn_left=block=0.0;\n }\n};\n\nstruct VertexInfo\n{\n lanelet_ptr_t lanelet;\n};\n\ntypedef boost::adjacency_list< boost::vecS, boost::vecS, boost::bidirectionalS, VertexInfo, EdgeInfo > Graph;\n\ntypedef Graph::vertex_descriptor node_t;\ntypedef Graph::edge_descriptor arc_t;\n\ntemplate < typename G >\nclass distance_heuristic : public boost::astar_heuristic< G, double>\n{\npublic:\n\n distance_heuristic(G& graph, typename G::vertex_descriptor target) : graph(graph), target(target)\n {}\n double operator()(typename G::vertex_descriptor u)\n {\n return 0;\n 
}\n\nprivate:\n G& graph;\n typename G::vertex_descriptor target;\n};\n\nstruct found_goal {}; // exception for termination\n\n// visitor that terminates when we find the goal\ntemplate <class Vertex>\nclass astar_goal_visitor : public boost::default_astar_visitor\n{\npublic:\n astar_goal_visitor(Vertex goal) : m_goal(goal) {}\n template <class Graph>\n void examine_vertex(Vertex u, Graph& g) {\n if(u == m_goal)\n throw found_goal();\n }\nprivate:\n Vertex m_goal;\n};\n\ntemplate< typename G >\nstruct Dijkstra\n{\n typedef typename G::vertex_descriptor vertex_t;\n G& graph;\n vertex_t source;\n\n std::vector< vertex_t > _predecessors;\n std::vector< double > _distances;\n\n void abort_if_vertex_invalid( vertex_t v )\n {\n if( v < 0 || v >= boost::num_vertices(graph) )\n {\n std::cerr << \"invalid vertices in Dijkstra.\" << std::endl;\n std::cerr << v << \" (\" << boost::num_vertices(graph) << \")\" << std::endl;\n abort();\n }\n }\n\n Dijkstra( G& graph, vertex_t source ) : graph(graph), source(source)\n {\n using namespace boost;\n\n abort_if_vertex_invalid( source );\n auto num_vertices = boost::num_vertices( graph );\n this->_predecessors.resize( num_vertices );\n this->_distances.resize( num_vertices );\n std::fill( _distances.begin(), _distances.end(), std::numeric_limits<double>::max() );\n\n boost::dijkstra_shortest_paths\n (graph, source,\n predecessor_map(_predecessors.data()).distance_map(_distances.data()).\n weight_map(get(&EdgeInfo::routing_cost, graph)));\n }\n\n std::deque< vertex_t > shortest_path( vertex_t target )\n {\n abort_if_vertex_invalid( target );\n const static std::deque< vertex_t > EMPTY_LIST;\n if( !reached(target) )\n return EMPTY_LIST;\n\n std::deque< vertex_t > sp;\n for( auto v = target; v != source ; v = _predecessors[v] )\n {\n sp.push_front( v );\n }\n\n sp.push_front( source );\n\n assert( sp.empty() || (sp.front() == source && sp.back() == target) && \"Dijkstra returns either empty list or path from source to target.\" );\n\n 
return sp;\n }\n\n bool reached( vertex_t target )\n {\n static const double BIG_NUM = 1e9;\n return _distances[target] < BIG_NUM;\n }\n};\n\ntemplate<typename G>\nboost::optional< std::vector< typename G::vertex_descriptor > >\ndijkstra_shortest_path( G& g, typename G::vertex_descriptor from, typename G::vertex_descriptor to )\n{\n boost::optional< std::vector< typename G::vertex_descriptor > > result;\n auto dij = Dijkstra< G >( g, from );\n\n if(!dij.reached(to))\n return result;\n\n auto sp = dij.shortest_path( to );\n\n std::vector< typename G::vertex_descriptor > my_sp( sp.size() );\n std::copy(sp.begin(), sp.end(), my_sp.begin());\n\n assert( ( my_sp.empty() && sp.empty() ) || (my_sp.front() == sp.front() && my_sp.back() == sp.back() ) && \"deque and vector are equal.\" );\n\n result = my_sp;\n return result;\n}\n\n}\n"
},
{
"alpha_fraction": 0.5562067627906799,
"alphanum_fraction": 0.5899994969367981,
"avg_line_length": 25.122758865356445,
"blob_id": "c1accc256ec8700515474b562122c9c264c1eef4",
"content_id": "702f2f040a10fdf45b40208fd994a29acab5a1eb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 18939,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 725,
"path": "/athena/examples/LCM/Singlecar/control/apps/control_view/control_view.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"control_view.h\"\n\nnamespace athena{\nnamespace control{\n\nint ControlView::x_lbefore = 0.0;\nint ControlView::y_lbefore = 0.0;\nint ControlView::x_rbefore = 0.0;\nint ControlView::y_rbefore = 0.0;\nint ControlView::z_before1 = 0.0;\nint ControlView::z_before2 = 0.0;\n\nbool ControlView::buttonSaveLeft = false;\nbool ControlView::buttonSaveMiddle = false;\nbool ControlView::buttonSaveRight = false;\nfloat ControlView::x_move = 0.0;\nfloat ControlView::y_move = 0.0;\nfloat ControlView::z_move = 0.0;\nfloat ControlView::x_move_save = 0.0;\nfloat ControlView::y_move_save = 0.0;\nfloat ControlView::z_move_save = 0.0;\nfloat ControlView::x_rotate = 0.0;\nfloat ControlView::y_rotate = 0.0;\nfloat ControlView::z_rotate = 0.0;\nfloat ControlView::x_rotate_save = 0.0;\nfloat ControlView::y_rotate_save = 0.0;\nfloat ControlView::z_rotate_save = 0.0;\nfloat ControlView::m_zoom = 0.0;\n\nfloat ControlView::m_aspect = 0.0;\n\nfloat ControlView::m_eyex = 0.0;\nfloat ControlView::m_eyey = 0.0;\nfloat ControlView::m_eyez = 0.0;\nfloat ControlView::m_eyeyaw = 0.0;\nfloat ControlView::m_centerx = 0.0;\nfloat ControlView::m_centery = 0.0;\nfloat ControlView::m_centerz = 0.0;\nfloat ControlView::m_upx = 0.0;\nfloat ControlView::m_upy = 0.0;\nfloat ControlView::m_upz = 0.0;\nint ControlView::g_frame = 0;\nbool ControlView::g_pause;\ndouble ControlView::front_track = 1.2;\ndouble ControlView::front_wheel_wide = 1;\ndouble ControlView::back_wheel_wide = 1;\n\ndouble ControlView::vehicle_width_ = 1.7;\n\nint32_t ControlView::match_point_num_ = 0;\ndouble ControlView::position_x_c = 0.0;\ndouble ControlView::position_y_c = 0.0;\ndouble ControlView::yaw_c = 0.0;\n\nControlLogic *ControlView::control_logic_;\nDebugOutput ControlView::debug_output_;\n\ndouble ControlView::tar_speed_debug_ = 0.0;\n\nvoid ControlView::OpenGL_Draw()\n{\n x_move = 0,y_move = 0,z_move = 0;\n x_rotate =1,y_rotate=1,z_rotate=1;\n m_zoom=1;\n g_frame=0;\n}\n\nvoid ControlView::Reshape(int w, int 
h)\n{\n glViewport(0, 0, (GLint)w, (GLint)h);\n\n m_aspect = (GLfloat) w / (GLfloat) h;\n\n glMatrixMode(GL_PROJECTION);\n glLoadIdentity();\n\n gluPerspective(45.0f,\n m_aspect,\n 0.0f,\n 4000.0f);\n\n glMatrixMode(GL_MODELVIEW);\n glLoadIdentity();\n\n}\n\nvoid ControlView::MouseMove(int x, int y)\n{\n int mod = glutGetModifiers();\n switch(mod)\n {\n case GLUT_ACTIVE_CTRL :\n x_rotate += (y - z_move_save)/100;\n if (x_rotate > 360)\n x_rotate=x_rotate - 360;\n if (x_rotate < -360)\n x_rotate=x_rotate + 360;\n return;\n\n case GLUT_ACTIVE_SHIFT :\n y_rotate += (y - z_move_save)/100;\n if (y_rotate > 360)\n y_rotate=y_rotate - 360;\n if (y_rotate < -360)\n y_rotate=y_rotate + 360;\n return;\n\n case GLUT_ACTIVE_ALT :\n float temp = (x - x_move_save)/100;\n z_rotate += atanf(temp);\n return;\n\n }\n\n if(buttonSaveLeft)\n {\n x_move += (x - x_move_save)/100;\n z_move += (y - z_move_save)/100;\n }\n\n if(buttonSaveMiddle)\n {\n float multiplay = (y - z_move_save)/1000000;\n m_zoom =m_zoom*(1+multiplay);\n }\n\n if(buttonSaveRight)\n {\n float multiplay = (y - z_move_save)/1000000;\n m_zoom =m_zoom*(1+multiplay);\n }\n\n}\n\nvoid ControlView::PassiveMouseMove(int x, int y)\n{\n\n}\n\nvoid ControlView::MouseRotate(int x, int y, int z)\n{\n // cout << \"mouse Rotate \" << x << \" \"<< y << \" \"<< z <<endl;\n}\n\nvoid ControlView::MouseKey(int button, int state, int x, int y)\n{\n x_move_save=x;\n// y_move_save;\n z_move_save=y;\n\n switch (button)\n {\n case GLUT_LEFT_BUTTON:\n if(state == GLUT_DOWN)\n buttonSaveLeft=true;\n else\n buttonSaveLeft=false;\n break;\n\n case GLUT_MIDDLE_BUTTON:\n if(state == GLUT_DOWN)\n buttonSaveMiddle=true;\n else\n buttonSaveMiddle=false;\n break;\n\n case GLUT_RIGHT_BUTTON:\n if(state == GLUT_DOWN)\n buttonSaveRight=true;\n else\n buttonSaveRight=false;\n break;\n }\n}\n\nvoid ControlView::Key(unsigned char key, int x, int y)\n{\n switch (key)\n {\n case KEY_ESC:\n control_logic_ -> SetDrivingModeDebug(1);\n control_logic_ -> 
SetTarSpeedDebug(0,false);\n break;\n }\n}\n\nvoid ControlView::SpecialKey(int key, int x, int y)\n{\n int mod = 0;\n switch (key)\n {\n case GLUT_KEY_UP:\n mod = glutGetModifiers();\n if (mod == GLUT_ACTIVE_ALT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_SHIFT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_CTRL)\n {\n\n }\n else\n y_move ++;\n\n break;\n\n case GLUT_KEY_DOWN:\n mod = glutGetModifiers();\n if (mod == GLUT_ACTIVE_ALT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_SHIFT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_CTRL)\n {\n\n }\n else\n y_move --;\n break;\n\n case GLUT_KEY_LEFT:\n mod = glutGetModifiers();\n if (mod == GLUT_ACTIVE_ALT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_SHIFT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_CTRL)\n {\n\n }\n else\n x_move --;\n break;\n\n case GLUT_KEY_RIGHT:\n mod = glutGetModifiers();\n if (mod == GLUT_ACTIVE_ALT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_SHIFT)\n {\n\n }\n else if (mod == GLUT_ACTIVE_CTRL)\n {\n\n }\n else\n x_move ++;\n break;\n\n case GLUT_KEY_PAGE_UP:\n m_zoom= 1.1*m_zoom;\n break;\n\n case GLUT_KEY_PAGE_DOWN\t:\n m_zoom =m_zoom/1.1;\n break;\n\n case GLUT_KEY_HOME:\n m_zoom=1.5*m_zoom;\n break;\n\n case GLUT_KEY_END:\n m_zoom=m_zoom/1.5;\n break;\n\n case GLUT_KEY_F1:\n control_logic_ -> SetDrivingModeDebug(3);\n break;\n\n case GLUT_KEY_F2:\n x_rotate += -3;\n if (x_rotate > 360)\n x_rotate=x_rotate - 360;\n if (x_rotate < -360)\n x_rotate=x_rotate + 360;\n break;\n\n case GLUT_KEY_F3:\n y_rotate += 3;\n if (y_rotate > 360)\n y_rotate=y_rotate - 360;\n if (y_rotate < -360)\n y_rotate=y_rotate + 360;\n break;\n\n case GLUT_KEY_F4:\n y_rotate += -3;\n if (y_rotate > 360)\n y_rotate=y_rotate - 360;\n if (y_rotate < -360)\n y_rotate=y_rotate + 360;\n break;\n\n case GLUT_KEY_F5:\n\n z_rotate += atanf(3);\n break;\n\n case GLUT_KEY_F6:\n\n z_rotate += atanf(-3);\n break;\n\n case GLUT_KEY_F9:\n\n\n break;\n\n case GLUT_KEY_F10:\n control_logic_ -> SetDrivingModeDebug(0);\n break;\n\n case GLUT_KEY_F11:\n 
tar_speed_debug_ += 2;\n control_logic_ -> SetTarSpeedDebug(tar_speed_debug_,true);\n break;\n\n case GLUT_KEY_F12:\n tar_speed_debug_ -= 2;\n if(tar_speed_debug_ < 0)\n tar_speed_debug_ = 0;\n control_logic_ -> SetTarSpeedDebug(tar_speed_debug_,true);\n // g_A60_controller->b_motion_plann_continue = true;\n // cout << \" b_motion_plann_continue \" << g_A60_controller->b_motion_plann_continue << endl;\n break;\n\n }\n glutPostRedisplay();\n}\n\nvoid ControlView::myDisplay(void)\n{\n glClearColor(0.0, 0.0, 0.0, 0.0);\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n glClearColor(0.0, 0.0, 0.0, 0.0);\n glLoadIdentity();\n/*\n m_eyex = g_A60_controller->CurrentX;\n m_eyey = g_A60_controller->CurrentY;\n m_eyez = g_A60_controller->CurrentZ + 20;\n\n m_centerx = g_A60_controller->CurrentX + 40 * sin( to_radians(g_A60_controller->Current_heading));\n m_centery = g_A60_controller->CurrentY + 40 * cos( to_radians(g_A60_controller->Current_heading));\n m_centerz = g_A60_controller->CurrentZ;\n\n m_upx = g_A60_controller->CurrentX;\n m_upy = g_A60_controller->CurrentY ;\n m_upz = g_A60_controller->CurrentZ ;\n*/\n debug_output_.path_.ref_points_.clear();\n control_logic_ -> GetControllerInfo(debug_output_);\n // std::cout<<\"match_point_num_:\"<<debug_output_.match_point_num_<<endl;\n m_eyex = debug_output_.local_localization_.current_x_;\n m_eyey = debug_output_.local_localization_.current_y_;\n m_eyez = debug_output_.local_localization_.current_z_ + 30;\n m_eyeyaw = debug_output_.local_localization_.current_yaw_;\n if(debug_output_.path_.IsEmpty() == false)\n {\n match_point_num_ = debug_output_.match_point_num_;\n position_x_c = debug_output_.path_.ref_points_[match_point_num_].position_x_;\n position_y_c = debug_output_.path_.ref_points_[match_point_num_].position_y_;\n yaw_c = debug_output_.path_.ref_points_[match_point_num_].heading_;\n }\n else{\n position_x_c = 0;\n position_y_c = 0;\n yaw_c = 0;\n }\n m_centerx = debug_output_.local_localization_.current_x_;\n 
m_centery = debug_output_.local_localization_.current_y_;\n m_centerz = debug_output_.local_localization_.current_z_;\n\n m_upx = debug_output_.local_localization_.current_x_;\n m_upy = debug_output_.local_localization_.current_y_ + 10;\n m_upz = debug_output_.local_localization_.current_z_;\n gluLookAt(m_eyex, \t m_eyey, \t m_eyez,\n m_centerx,\tm_centery,\t m_centerz,\n m_upx,\t \tm_upy,\t\t m_upz);\n glScalef(1, 1, 1);\n glRotatef(x_rotate,1,0,0);\n glRotatef(y_rotate,0,1,0);\n glRotatef(z_rotate,0,0,1);\n glTranslatef(x_move, y_move, z_move);\n glScalef(m_zoom, m_zoom, m_zoom);\n Draw_Org();\n //std::cout<<\"m_eyeyaw:\"<<m_eyeyaw<<endl;\n DrawCar(m_eyex,m_eyey,m_eyeyaw,control_logic_->GetSteeringAngleFeedback());\n DrawCar_e(m_eyex, m_eyey,position_x_c, position_y_c,m_eyeyaw, yaw_c);\n if(debug_output_.path_.IsEmpty() == false)\n {\n Draw_Virtual_lane(debug_output_.path_, 100, debug_output_.path_.ref_points_.size(), 10, 0, 1, 0);\n Draw_Best_March_Point(debug_output_.path_, debug_output_.match_point_num_);\n }\n\n glutPostRedisplay();\n usleep(10000);\n\n glFlush();\n glutSwapBuffers();\n}\n\nvoid ControlView::Draw_Virtual_lane(path& v_p,\n int output_st_pos,\n int output_en_pos,\n int every,\n int r, int g, int b)\n{\n unsigned int i;\n double direction;\n double direction_d;\n double cos_d;\n double sin_d;\n\n double x, xx, x11, x22, x33, x44;\n double y, yy, y11, y22, y33, y44;\n\n int length = v_p.ref_points_.size();\n\n if( length - every <= 0)\n return;\n\n for ( i = 0; i < v_p.ref_points_.size() - every; i+=every )\n {\n glLineWidth(1);\n glColor3d(r,g,b);\n\n direction = v_p.ref_points_[i].heading_;\n x = v_p.ref_points_[i].position_x_;\n y = v_p.ref_points_[i].position_y_;\n direction_d = to_radians(direction);\n cos_d = cos(direction_d);\n sin_d = sin(direction_d);\n\n x11 = x - vehicle_width_ /2.0 * cos_d;\n y11 = y + vehicle_width_ /2.0 * sin_d;\n x22 = x + vehicle_width_ /2.0 * cos_d;\n y22 = y - vehicle_width_ /2.0 * sin_d;\n\n direction = 
v_p.ref_points_[i+every].heading_;\n xx = v_p.ref_points_[i+every].position_x_;\n yy = v_p.ref_points_[i+every].position_y_;\n\n direction_d = to_radians(direction);\n cos_d = cos(direction_d);\n sin_d = sin(direction_d);\n\n x33 = xx - vehicle_width_ /2.0 * cos_d;\n y33 = yy + vehicle_width_ /2.0 * sin_d;\n x44 = xx + vehicle_width_ /2.0 * cos_d;\n y44 = yy - vehicle_width_ /2.0 * sin_d;\n\n glBegin(GL_LINE_STRIP);\n glVertex2f(x11,y11);\n glVertex2f(x22,y22);\n glVertex2f(x44,y44);\n glVertex2f(x33,y33);\n glVertex2f(x11,y11);\n glEnd();\n }\n}\n\nvoid ControlView::Draw_Best_March_Point(path& p, int no_points)\n{\n double x;\n double y;\n\n if(no_points <= 0)\n {\n return;\n }\n\n y = p.ref_points_[no_points].position_y_;\n x = p.ref_points_[no_points].position_x_;\n\n double xx = x;\n double yy = y;\n\n glPointSize(8);\n glColor3d(1, 1, 1);\n glBegin(GL_POINTS);\n glVertex2f(xx,yy);\n glEnd();\n\n glColor3d(1,1,1);\n glBegin(GL_LINES);\n glVertex2f(xx - 1, yy);\n glVertex2f(xx + 1, yy);\n glEnd();\n\n glBegin(GL_LINES);\n glVertex2f(xx, yy-1);\n glVertex2f(xx, yy+1);\n glEnd();\n}\n\n\nvoid ControlView::MyGLDispIni()\n{\n GLenum type;\n\n m_eyex=0, m_eyey=0, m_eyez= 80;\n m_centerx=0, m_centery=0, m_centerz=0;\n m_upx=0, m_upy=1, m_upz=0;\n\n buttonSaveLeft=false;\n buttonSaveMiddle=false;\n buttonSaveRight=false;\n\n x_move=0.0;\n y_move=0.0;\n z_move=0.0;\n x_rotate=0.0;\n y_rotate=0.0;\n z_rotate=0.0;\n m_zoom=1;\n\n x_lbefore=0, y_lbefore=0;\n x_rbefore=0, y_rbefore=0;\n z_before1=0, z_before2=0;\n\n type = GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE;\n glutInitDisplayMode(type);\n glutInitWindowSize(740, 860);\n\n string windows_name = \"controller 2080802\" ;\n glutCreateWindow(windows_name.c_str());\n\n glutReshapeFunc( Reshape );\n glutKeyboardFunc(Key);\n glutSpecialFunc(SpecialKey);\n glutMouseFunc(MouseKey);\n glutMotionFunc(MouseMove);\n glutPassiveMotionFunc(PassiveMouseMove);\n glutSpaceballRotateFunc(MouseRotate);\n\n 
glutDisplayFunc(&myDisplay);\n}\n\nvoid ControlView::Init(ControlLogic*control_logic,double vehicle_width)\n{\n vehicle_width_ = vehicle_width;\n control_logic_ = control_logic;\n}\n\nvoid ControlView::Draw_Org()\n{\n glPointSize(8);\n glColor3d(1, 1, 1);\n glBegin(GL_POINTS);\n glVertex2f(0, 0);\n glEnd();\n //\n glColor3d(1,1,1);\n glBegin(GL_LINES);\n glVertex2f(0 - 2, 0);\n glVertex2f(0 + 2, 0);\n glEnd();\n\n glBegin(GL_LINES);\n glVertex2f(0, 0-3);\n glVertex2f(0, 0+3);\n glEnd();\n}\n\nvoid ControlView::Draw_Point(double x, double y)\n{\n glPointSize(3);\n// glColor3d(0, 0, 1);\n glBegin(GL_POINTS);\n glVertex2f(0, 0);\n glEnd();\n //\n// glColor3d(0,0,1);\n glBegin(GL_LINES);\n glVertex2f(x - 0.5, y);\n glVertex2f(x + 0.5, y);\n glEnd();\n\n glBegin(GL_LINES);\n glVertex2f(x, y - 0.5);\n glVertex2f(x, y + 0.5);\n glEnd();\n}\n\n\nvoid ControlView::DrawCar_e(double x, double y, double x_c, double y_c, double yaw, double yaw_c)\n{\n glLineWidth(2);\n glColor3d(0,1,0);\n glBegin(GL_LINES);\n glVertex2f(x, y);\n glVertex2f(x_c, y_c);\n glEnd();\n}\n\nvoid ControlView::DrawCar(double x, double y, double yaw, double steer_angle)\n{\n glPointSize(8);\n glColor3d(1,0,0);\n glBegin(GL_POINTS);\n glVertex2f( x + (1.2) * sin( yaw /180 *PI ),\n y + (1.2) * cos( yaw /180 *PI ) );\n glEnd();\n\n double frontwtht = to_radians(yaw);\n // double backwtht = to_radians(yaw + 90);\n double wheel_direction = to_radians(yaw + steer_angle/15 + 90); //\n\n double frontWhellCenterX = x + (1.2) * sin(frontwtht);\n double frontWhellCenterY = y + (1.2) * cos(frontwtht);\n double backWhellCenterX = x - (1.5) * sin(frontwtht);\n double backWhellCenterY = y - (1.5) * cos(frontwtht);\n\n glLineWidth(2);\n glColor3d(0,1,0);\n glBegin(GL_LINES);\n glVertex2f(x, y);\n glVertex2f(frontWhellCenterX,frontWhellCenterY );\n glEnd();\n\n glColor3d(1,0,0);\n glBegin(GL_LINES);\n glVertex2f(x, y);\n glVertex2f(backWhellCenterX,backWhellCenterY );\n glEnd();\n\n double frontWhellLeftX = 
frontWhellCenterX - (( front_track/2.0) * cos( frontwtht));\n double frontWhellLeftY = frontWhellCenterY + (( front_track/2.0) * sin( frontwtht));\n double frontWhellRightX = frontWhellCenterX + (( front_track/2.0) * cos( frontwtht));\n double frontWhellRightY = frontWhellCenterY - (( front_track/2.0) * sin( frontwtht));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(frontWhellLeftX, frontWhellLeftY);\n glVertex2f(frontWhellRightX, frontWhellRightY);\n glEnd();\n\n double frontLeftWhellUpX = frontWhellLeftX - (( front_wheel_wide/2.0) * cos( wheel_direction ));\n double frontLeftWhellUpY = frontWhellLeftY + (( front_wheel_wide/2.0) * sin( wheel_direction ));\n double frontLeftWhellDownX = frontWhellLeftX + (( front_wheel_wide/2.0) * cos( wheel_direction ));\n double frontLeftWhellDownY = frontWhellLeftY - (( front_wheel_wide/2.0) * sin( wheel_direction ));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(frontLeftWhellUpX, frontLeftWhellUpY);\n glVertex2f(frontLeftWhellDownX, frontLeftWhellDownY);\n glEnd();\n\n double frontrightWhellUpX = frontWhellRightX - (( front_wheel_wide/2.0) * cos( wheel_direction ));\n double frontrightWhellUpY = frontWhellRightY + (( front_wheel_wide/2.0) * sin( wheel_direction ));\n double frontrightWhellDownX = frontWhellRightX + (( front_wheel_wide/2.0) * cos( wheel_direction ));\n double frontrightWhellDownY = frontWhellRightY - (( front_wheel_wide/2.0) * sin( wheel_direction ));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(frontrightWhellUpX, frontrightWhellUpY);\n glVertex2f(frontrightWhellDownX, frontrightWhellDownY);\n glEnd();\n\n double BackWhellLeftX = backWhellCenterX - (( front_track/2.0) * cos( frontwtht));\n double BackWhellLeftY = backWhellCenterY + (( front_track/2.0) * sin( frontwtht));\n double BackWhellRightX = backWhellCenterX + (( front_track/2.0) * cos( frontwtht));\n double BackWhellRightY = backWhellCenterY - (( front_track/2.0) 
* sin( frontwtht));\n\n double backwtht = to_radians(yaw + 90);\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(BackWhellRightX, BackWhellRightY);\n glVertex2f(BackWhellLeftX, BackWhellLeftY);\n glEnd();\n\n double BackLeftWhellUpX = BackWhellLeftX - ((back_wheel_wide/2.0) * cos(backwtht));\n double BackLeftWhellUpY = BackWhellLeftY + (( back_wheel_wide/2.0) * sin(backwtht));\n double BackLeftWhellDownX = BackWhellLeftX + (( back_wheel_wide/2.0) * cos(backwtht));\n double BackLeftWhellDownY = BackWhellLeftY - (( back_wheel_wide/2.0) * sin(backwtht));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(BackLeftWhellDownX, BackLeftWhellDownY);\n glVertex2f(BackLeftWhellUpX, BackLeftWhellUpY);\n glEnd();\n\n double BackrightWhellUpX = BackWhellRightX - ((back_wheel_wide/2.0) * cos(backwtht));\n double BackrightWhellUpY = BackWhellRightY + ((back_wheel_wide/2.0) * sin(backwtht));\n double BackrightWhellDownX = BackWhellRightX + ((back_wheel_wide/2.0) * cos(backwtht));\n double BackrightWhellDownY = BackWhellRightY - ((back_wheel_wide/2.0) * sin(backwtht));\n\n glLineWidth(2);\n glColor3d(1,0,1);\n\n glBegin(GL_LINES);\n glVertex2f(BackrightWhellUpX, BackrightWhellUpY);\n glVertex2f(BackrightWhellDownX, BackrightWhellDownY);\n glEnd();\n}\n\n\n}\n}\n"
},
{
"alpha_fraction": 0.6979591846466064,
"alphanum_fraction": 0.7091836929321289,
"avg_line_length": 19.851064682006836,
"blob_id": "34272b2b8f7665c2acbd633a5186caeb0d45f1ed",
"content_id": "919e90a60cd1f3b85ea98ec3d3c6e6ad9a8de271",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 980,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 47,
"path": "/athena/core/arm/Control/include/lqr_controller/simple_lateral_debug.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file simple_lateral_debug.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROLLER_SIMPLE_LATERAL_DEBUG_H_\n#define CONTROLLER_SIMPLE_LATERAL_DEBUG_H_\n\n/**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class SimpleLateralDebug\n *\n * @brief SimpleLateralDebug.\n */\nclass SimpleLateralDebug{\n public:\n double lateral_error_;\n double ref_heading_;\n double heading_;\n double heading_error_;\n double heading_error_rate_;\n double lateral_error_rate_;\n double curvature_;\n double steer_angle_;\n double steer_angle_feedforward_;\n double steer_angle_lateral_contribution_;\n double steer_angle_lateral_rate_contribution_;\n double steer_angle_heading_contribution_;\n double steer_angle_heading_rate_contribution_;\n double steer_angle_feedback_;\n double steering_position_;\n double ref_speed_;\n double steer_angle_limited_;\n};\n}\n}\n#endif\n"
},
{
"alpha_fraction": 0.570241391658783,
"alphanum_fraction": 0.6129798293113708,
"avg_line_length": 16.54861068725586,
"blob_id": "7127ed858dc8c8cec6f4a6eb76ffdd0eaf0be0c6",
"content_id": "8e2622ee6e13a37eb42172d83ffbed897b2d333c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2527,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 144,
"path": "/athena/core/x86/Camera/vision_ssd_detect/kalman/math_util.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _MATHUTIL_H\n#define _MATHUTIL_H\n\n#include <math.h>\n#include <stdlib.h>\n#include <assert.h>\n#include <float.h>\n\n#ifndef _MSC_VER\n#define _finite finite\n#endif\n\n//#define INFINITY FLT_MAX\n#define M_PI 3.14159265358979323846\n//#define M_PI_2 3.14159265358979323846*2\n//#define NAN 0\n\n#define to_radians(x) ( (x) * (M_PI / 180.0 ))\n#define to_degrees(x) ( (x) * (180.0 / M_PI ))\n\nvoid sincos(double ra, double * sinra, double* cosra);\n// int isless(double x, double y);\n\n\n//inline int isfinite(double x) { return _finite(x); }\n\nstatic inline double sq(double v)\n{\n return v*v;\n}\n\nstatic inline double sgn(double v)\n{\n return (v>=0) ? 1 : -1;\n}\n\n// random number between [0, 1)\nstatic inline float randf()\n{\n return ((float) rand()) / (RAND_MAX + 1.0);\n}\n\n\nstatic inline float signed_randf()\n{\n return randf()*2 - 1;\n}\n\n#ifdef _MSC_VER\nstatic inline double f_min(double a, double b)\n{\n return (a < b) ? a : b;\n}\n\nstatic inline double f_max(double a, double b)\n{\n return (a > b) ? 
a : b;\n}\n#endif\n// return a random integer between [0, bound)\nstatic inline int irand(int bound)\n{\n int v = (int) (randf()*bound);\n assert(v >= 0);\n assert(v < bound);\n return v;\n}\n\n#ifndef PI\n#define PI 3.14159265358979323846264338\n#endif\n\n#define TWOPI_INV (0.5/PI)\n#define TWOPI (2*PI)\n\n/** valid only for v > 0 **/\nstatic inline double mod2pi_positive(double vin)\n{\n double q = vin * TWOPI_INV + 0.5;\n int qi = (int) q;\n\n return vin - qi*TWOPI;\n}\n\n/** Map v to [-PI, PI] **/\nstatic inline double mod2pi(double vin)\n{\n if (vin < 0)\n return -mod2pi_positive(-vin);\n else\n return mod2pi_positive(vin);\n}\n\n/** Return vin such that it is within PI degrees of ref **/\nstatic inline double mod2pi_ref(double ref, double vin)\n{\n return ref + mod2pi(vin - ref);\n}\n\nstatic inline int theta_to_int(double theta, int max)\n{\n theta = mod2pi_ref(M_PI, theta);\n int v = (int) (theta / ( 2 * M_PI ) * max);\n\n if (v==max)\n v = 0;\n\n assert (v >= 0 && v < max);\n\n return v;\n}\n\nstatic inline int imin(int a, int b)\n{\n return (a < b) ? a : b;\n}\n\nstatic inline int imax(int a, int b)\n{\n return (a > b) ? a : b;\n}\n\nstatic inline float f_min(float a, float b)\n{\n\treturn (a < b) ? a : b;\n}\n\nstatic inline float f_max(float a, float b)\n{\n\treturn (a > b) ? a : b;\n}\n\nstatic inline int iclamp(int v, int minv, int maxv)\n{\n return imax(minv, imin(v, maxv));\n}\n\nstatic inline double fclamp(double v, double minv, double maxv)\n{\n return f_max(minv, f_min(v, maxv));\n}\n\n\n#endif\n"
},
{
"alpha_fraction": 0.6740740537643433,
"alphanum_fraction": 0.7111111283302307,
"avg_line_length": 43.83333206176758,
"blob_id": "dff49cacc7369385d1539666088c32db333e6f84",
"content_id": "73fc5cfc88699bfcebb4c76af5ca04415c5fa2ad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 298,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 6,
"path": "/athena/install_x86.sh",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#将动态库文件拷贝到系统目录下\nsudo -S cp core/x86/Map/lib/*.so /usr/local/lib\nsudo -S cp core/x86/Navi/lib/*.so /usr/local/lib\nsudo -S cp core/x86/Planning/lib/*.so /usr/local/lib\nsudo -S cp core/x86/Control/lib/*.so /usr/local/lib\nsudo -S cp core/x86/Common/lib/*.so /usr/local/lib\n\n"
},
{
"alpha_fraction": 0.590299665927887,
"alphanum_fraction": 0.6327549815177917,
"avg_line_length": 45.67484664916992,
"blob_id": "7ef06ca10e3cea2889c8e2a354e5cd02da25aaa8",
"content_id": "e3137f031315a238026b873c221f2b99ce5ed4a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 10383,
"license_type": "no_license",
"max_line_length": 153,
"num_lines": 163,
"path": "/athena/core/arm/Planning/include/planning/planning_param.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 局部规划的相关参数。\n */\n\n #ifndef _ATHENA_PLANNING_PARAM_H\n #define _ATHENA_PLANNING_PARAM_H\n\n /**\n * @namespace athena::planning\n * @brief athena::planning\n */\nnamespace athena{\nnamespace planning{\n\n // 档位参数\n #define AT_STATUS_P 0 ///<档位信息,P档\n #define AT_STATUS_R 1 ///<档位信息,R档\n #define AT_STATUS_N 2 ///<档位信息,N档\n #define AT_STATUS_D 3 ///<档位信息,D档\n #define AT_STATUS_M 4 ///<档位信息,M档\n\n //车型相关参数\n #define WHEEL_BASE 2.5 ///<车轮轴长,单位:米\n #define CAR_LENGTH 4.5 ///<车身长,单位:米\n #define CAR_WIDTH 1.5 ///<车身宽,单位:米\n #define CAR_HIGH 1.515 ///<车身高,单位:米\n #define CAR_WEIGHT 1245 ///<车身质量,单位kg\n #define CAR_MIN_R 6.0 ///<车最小转弯半径,单位:米\n #define MAX_STEERING_ANGLE +548 ///<最大方向盘转角\n #define MIN_STEERING_ANGLE -548 ///<最小方向盘转角\n #define STEERING_RATIO 15 ///<方向盘和车轮转角的比例关系\n\n //泊车相关参数\n #define SAFE_DISTANCE_PARK 0.8 ///<停车的安全保护距离(前后)\n #define SAFE_WIDTH_PARK 0.4 ///<停车的安全保护宽度(两侧)\n #define PARK_LANE_WIDTH 8.0 ///<泊车时泊车通道的道路宽度\n #define H_MIN 3.0 ///<泊车时纵向距离H的最小值\n #define H_MAX 7.0 ///<泊车时纵向距离H的最大值\n #define H_STEP 1.0 ///<泊车时纵向距离H的取值步长\n #define S_STEP 1.0 ///<泊车时横向距离S的取值步长\n #define DELTA_S_MAX 5.0 ///<泊车时横向距离S向前搜索的范围\n #define EXTENDED_LINE_OF_PARALLEL 5.0 ///<平行泊车时向前延展的距离\n #define EXTENDED_LINE_OF_VERTICAL 5.0 ///<垂直泊车时向前延展的距离\n #define PARK_SPEED 5.0 ///<泊车时的速度,单位:km/h\n #define THRESHOLD_START_PARK 20.0 ///<车与车库中心的距离小于该阈值时,可以触发泊车\n #define THRESHOLD_CAR_STATIC_SPEED 0.1 ///<本车车速小于该阈值可以认为车静止,单位:m/s\n #define NUM_EXTEND_TRAJECTORY 600 ///<给控制发轨迹时,需要延长一些(点的个数)\n\n //地图匹配参数\n #define PRIOR_MATCH_LANE 1\t\t\t///<优先匹配车道,1=低速道,0=高速道\n #define THRESHOLD_MATCH_CENTERS 0.5\t\t///<匹配多车道中心线的阈值(米)\n #define GLOBAL_SEARCH_MATCH_CENTERS 10.0\t///<匹配车道中心线粗搜索时的限差范围(米)\n #define NUM_BEFORE_MATCH_CENTERS 5\t\t///<匹配车道中心线时向前搜索的点数\n #define THRESHOLD_MATCH_BIAS 10.0\t\t///<点匹配到中心线的距离过大,认为匹配失败(米)\n #define COEF1_MOTION_WITHOUT_VP 15.0\t\t///<没有virtual_path时(第一次规划目的地),计算MOTION_PLANNING_LENGTH的系数1(常数项)\n #define COEF2_MOTION_WITHOUT_VP 
1.0\t\t///<没有virtual_path时(第一次规划目的地),计算MOTION_PLANNING_LENGTH的系数2(横向偏差系数)\n #define COEF1_AHEAD_WITHOUT_VP 0.0\t\t///<没有virtual_path时(第一次规划目的地),计算AHEAD_OF_MOTION_PLANNING的系数1(常数项)\n #define COEF2_AHEAD_WITHOUT_VP 0.0\t\t///<没有virtual_path时(第一次规划目的地),计算AHEAD_OF_MOTION_PLANNING的系数2(速度系数)\n #define COEF1_MOTION_WITH_VP 10.0\t\t///<有virtual_path时,计算MOTION_PLANNING_LENGTH的系数1(常数项)\n #define COEF2_MOTION_WITH_VP 3.0\t\t///<有virtual_path时,计算MOTION_PLANNING_LENGTH的系数2(速度系数)\n #define COEF1_AHEAD_WITH_VP 0.5\t\t///<有virtual_path时,计算AHEAD_OF_MOTION_PLANNING的系数1(常数项)\n #define COEF2_AHEAD_WITH_VP 0.2\t\t///<有virtual_path时,计算AHEAD_OF_MOTION_PLANNING的系数2(速度系数)\n #define MIN_MOTION_LENGTH 5.0\t\t\t///<一次规划的最小长度(米)\n #define MAX_MOTION_DELTA_HEADING 30.0\t\t///<一次规划的最大角度差(度)\n #define INTERVAL_MAP_SECTION 1.5\t\t///<下发的地图切片点的间隔(米)\n #define SPLINE_EVERY 0.1\t\t\t///<规划轨迹点间隔(米)\n #define MAP_SPLINE_EVERY 1.5\t\t\t///<地图中心线点间隔(米)\n #define MATCH_STOP_POINT_ERROR 2.0\t\t///<匹配停车点时点距离线的最小限差(米)\n #define TRAFFIC_LIGHTS_CHECKS_LENGTH 40; ///<路口红绿灯停车点的检查距离(个)\n #define BEFORE_LIGHTS 1; ///<路口提前停车距离 (个)\n\n //障碍物\n #define NUMBER_BACKWARD 70\t\t\t///<障碍物根据边界过滤时向后搜索的点数\n #define NUMBER_FORWARD 70\t\t\t///<障碍物根据边界过滤时向前搜索的点数\n #define PEDESTRIAN_WIDEN_DIS 2.5\t\t///<行人加宽距离范围(米)\n #define PEDESTRIAN_WIDEN_ANG 0.0\t\t///<行人加宽角度范围(度)\n #define PEDESTRIAN_WIDEN_WIDTH 0.0\t\t///<行人加宽的宽度(米)\n #define CAR_LENGTHEN_LENGTH 0.0\t\t///<障碍车加长的长度(米)\n #define OBSTACLE_COST_VALUE 30000\t\t///<障碍物的代价值\n #define B_READ_OBSTACLE_SPEED 1\t\t///<0:不读取障碍物速度;1:读取障碍物速度\n\n //碰撞检测\n #define COL_CHECK_INTERVAL 15\t\t\t///<碰撞检测时,虚拟车道的搜索间隔(个)\n #define THRESHOLD_DELTA_LENGTH 2.0\t\t///<虚拟车道搜索时,前后点间距超过一定的阈值,就调整搜索间隔(米)\n #define THRESHOLD_STATIC_SPEED 0.5\t\t///<速度小于THRESHOLD_STATIC_SPEED m/s,认为是静态障碍物,需要停车或避障(m/s)\n #define RATIO_SPEED_CAR_FOLLOWING 0.7 ///障碍物速度小于本车速度该比例时,选择超车\n #define CAR_FOLLOWING_SPEED_DIFF 1.5\t\t///<调整车速略小于跟随的障碍物速度(m/s)\n #define SAFE_WIDTH 0.5 \t\t\t///<安全保护的距离(米)\n #define SAFE_LENGTH 0.3 
\t\t\t///<安全保护的距离(米)\n #define FREE_LENGTH_DIFF 15.0\t\t\t///<如果车道都有碰撞,取碰撞距离更大的(米)\n #define COUNT_COLLISION 10\t\t\t///<持续超过COUNT_COLLISION帧有碰撞才进行重规划,否则只是减速\n// #define SLOW_DOWN_STEP 0.1\t\t\t///<减速的步长(程序中会重新计算)\n #define COEF_SLOW_DOWN_STEP 0.1\t\t///<SLOW_DOWN_STEP 0.5 * motion_tar_speed / COUNT_COLLISION;\n #define COEF_COL_CHECK_LENGTH 1.5\t\t///<决定碰撞检测长度:COLLISION_CHECK_LENGTH=COEF_COL_CHECK_LENGTH * ( MOTION_PLANNING_LENGTH + AHEAD_OF_MOTION_PLANNING );\n #define COUNT_SLOW 10\t\t\t///<遇见障碍物减速后保持低速一段时间\n\n //轨迹规划相关参数\n #define COEF_UPDATE_TRAJECTORY 0.4\t\t///<更新轨迹的比例参数,决定走过多少里程更新一次轨迹\n #define AHEAD_OF_MOTION_PLANNING 12\t\t///<单位(米)\n #define MOTION_PLANNING_LENGTH 42\t\t///<单位(米)\n #define AFTER_MOTION_PLANNING_NUM 40\t\t///<单位(个)\n\n //轨迹生成【横向】\n #define LAT_OFFSET 4.0 \t\t\t///<用于生成轨迹范围(左右各LAT_OFFSET米)\n #define COEF_LIMIT_LAT_STEP 0.1\t\t///<拨杆换道限制、障碍物减速过程或者经过路口时,横向偏移范围收缩的比例系数\n #define NUM_TRAJ_CLUSTER 16\t\t\t///<生成轨迹簇的个数(2×NUM_TRAJ_CLUSTER+1)\n #define NUM_BACKWARD_TRAJ_CHECK 5\t\t///<轨迹边界检测时向后搜索的点数(个)\n #define NUM_FORWARD_TRAJ_CHECK 5\t\t///<轨迹边界检测时向前搜索的点数(个)\n #define OUT_SIDE_INTERVAL 20\t\t\t///<判断轨迹是否出界时的搜索步长(个)\n #define OUT_LINE_COST_VALUE 200\t\t///<出车道线的代价值\n #define OUT_EDGE_COST_VALUE 60000\t\t///<出道路边缘的代价值\n #define COEF_END_POS 20.0\t\t\t///<综合计算代价值时,终点代价值的系数\n #define COEF_COL_CHECK 1.0\t\t\t///<综合计算代价值时,碰撞代价值的系数\n #define COEF_LEFT 1.0\t\t\t\t///<综合计算代价值时,左边线代价值的系数\n #define COEF_RIGHT 1.0\t\t\t///<综合计算代价值时,右边线代价值的系数\n #define COEF_KS 10000.0\t\t\t///<综合计算代价值时,曲率代价值的系数\n #define THRESHOLD_KS 0.285\t\t\t///<当曲率大于THRESHOLD_KS时,考虑曲率代价值\n #define THRESHOLD_COST_VALUE 20000\t\t///<当最优路径的代价值还大于THRESHOLD_COST_VALUE时,需要停车\n\n //轨迹生成【纵向】\n #define STOP_LENGTH_TO_OBS 100\t\t\t///<停车位置距离障碍物的距离\n #define COEF_KS_SPEED 0.005\t\t\t///<速度、曲率转换\n #define MIN_SPEED 1.0\t\t\t\t///<最小速度(m/s)\n #define MAX_ACCELERATION 1.0\t\t\t///<最大加速度\n #define MAX_DECELERATION 0.5\t\t\t///<最大减速度\n\n //origin point\n #define ORIGIN_LAT 31.281675599\t///<坐标原点纬度 
//shanghai\n #define ORIGIN_LON 121.163174090 ///<坐标原点经度\n// #define ORIGIN_LAT 39.7409469\t///<坐标原点纬度 //tongzhou\n// #define ORIGIN_LON 116.6545923 ///<坐标原点经度\n// #define ORIGIN_LAT 30.45814058804\t///<坐标原点纬度 //wuhan\n// #define ORIGIN_LON 114.31801222674 ///<坐标原点经度\n\n #define MAP_LOG_SWITCH 0\n #define TRAJECTORY_LOG_SWITCH 0\n #define TRAFFIC_LIGHTS_LOG_SWITCH 0\n #define CHANGE_LANE_LOG_SWITCH 0\n #define LATERAL_CONTROL_LOG_SWITCH 0\n #define LONGITUDINAL_CONTROL_LOG_SWITCH 0\n #define MAP_MATCHING_LOG_SWITCH 0\n #define SELECT_VALUE_LOG_SWITCH 0\n #define VIRTUAL_PATH_LOG_SWITCH 0\t///<实时匹配虚拟轨迹的状态\n\n #define TRAJECTORY_VIEW_SWITCH 0\n #define SPEED_PLAN_VIEW_SWITCH 0\n #define CHANGE_LANE_VIEW_SWITCH 0\n #define LATERAL_CONTROL_VIEW_SWITCH 0\n #define LONGITUDINAL_CONTROL_VIEW_SWITCH 0\n #define MAP_MATCHING_VIEW_SWITCH 0\n #define COLLISION_CHECK_VIEW_SWITCH 0\n #define PLANNING_VALUE_VIEW_SWITCH 0\n #define NEXTWORK_CHANGELANE_VIEW_SWITCH 0\n #define SELECT_VALUE_VIEW_SWITCH 0\n\n\n\n} //namespace planning\n} //namespace athena\n\n#endif //_ATHENA_PLANNING_PARAM_H\n"
},
{
"alpha_fraction": 0.707317054271698,
"alphanum_fraction": 0.7398374080657959,
"avg_line_length": 19.5,
"blob_id": "3a03c0d3c4c818ebe768b4c0ecb3cde299b8d5cf",
"content_id": "d772010ad261d868ebf1f82e1d77a79a3e2e8ff5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 123,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 6,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/brake/cs55/cs55_deceleration_brake_map.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"cs55_deceleration_brake_map.h\"\n\n double CS55GetBrakeVaule(double deceleration)\n {\n return (deceleration);\n }\n"
},
{
"alpha_fraction": 0.5946919918060303,
"alphanum_fraction": 0.5963302850723267,
"avg_line_length": 24.647058486938477,
"blob_id": "af74d1f924bf4a787807cd7505475f1036e4ebcf",
"content_id": "4879072d92f2a1c388ce1cdc1c17ef3ddd287c35",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3154,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 119,
"path": "/athena/core/x86/Planning/include/trajectory/trajectory_sets.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#pragma once\n\n#include <math.h>\n#include <iostream>\n#include <vector>\n\n//#include \"common/ecu.h\"\n#include \"common/navi_point.h\"\n#include \"collision_check/collision_check.h\"\n#include \"planning/route_data.h\"\n\n\n#include \"trajectory.h\"\n\nusing namespace std;\n\ndouble calculate_sign_and_value_offset(\n double x, double y, double yaw,\n double x_c, double y_c, double yaw_c);\n\nvoid calculate_d_dd(double current_heading,\n double current_steer_angle,\n double current_speed_lon,\n double delta_s,\n double heading_r,\n double& d_off,\n double& dd_off);\n\ndouble offset_of_trajectory_from_lane(\n trajectory_cubic& tr,\n path& p,\n double offset ,\n int change_lane_ending_lane,\n int in_lane);\n\nclass trajectory_sets\n{\npublic:\n\npublic:\n trajectory_sets();\n ~trajectory_sets();\n};\n\nclass trajectory_cubic_sets\n : public trajectory_sets\n{\npublic:\n // 生成的所有轨迹\n vector< trajectory_cubic > trajectorys;\n // 参考轨迹的所有点\n vector<navi_point> ref_path_points;\n // best_one\n trajectory_cubic* p_best_trajectorys;\n // current_trajectory\n trajectory_cubic traj_current;\n\n int best_one;\n\n double block_rate;\n\n double s_pos;\n double e_pos;\n\n navi_point pose_begin_;\n navi_point pose_end_;\n int num_begin_;\n int num_end_;\n vector< double > tail_lat_offset_; //尾部拼接的偏移量\n\n bool is_change_lane_limit_;\n\npublic:\n trajectory_cubic_sets();\n ~trajectory_cubic_sets();\n\n void clear_all();\n\n /**\n * @brief 设置轨迹的始末点\n */\n int set_begin_end_pose( const navi_point& pose_begin, const navi_point& pose_end,\n const int& num_begin, const int& num_end );\n\n /**\n * @brief 生成最优轨迹\n */\n int genenrate_best_trajectory( const RoadSurface& road_surface,\n const route_planning_m& route,\n const double& car_speed,\n const int car_action,\n path &out_best_trajectory );\n\n void set_ref_path_length(vector<navi_point>& ref_, int start_pos , int end_pos);\n\n // 障碍物判断\n void cluster_collision_check( const RoadSurface& road, const double car_speed );\n 
void collision_check(RoadSurface& road);\n void cluster_ks_check();\n void consistency_check();\n int nearest_tail_check(\n path& lane,\n int change_lane_ending_lane,\n int in_lane);\n\n //综合评价\n int select_best_traj( const route_planning_m& route_planning, const int ref_pos1, const int ref_pos2, const int car_action );\n\n\n void speed_plan(double max_speed);\n\n\n int out_side_traj_check( line_xys left_edge, line_xys right_edge, int st_pos, int en_pos, double cost_value );\n\n ///生成轨迹簇\n int gen_traj_cluster( navi_point pos_sta, vector< navi_point > pos_ends_1, vector< navi_point > pos_ends_2 );\n int gen_traj_cluster( navi_point pos_sta, vector< navi_point > pos_ends_1 );\n\n};\n"
},
{
"alpha_fraction": 0.4155496060848236,
"alphanum_fraction": 0.4584450423717499,
"avg_line_length": 19.72222137451172,
"blob_id": "43cba605048a8ee06709f4cf124c5ecf4baf432a",
"content_id": "0ab8b8d22ccdc161c59f409d352d0cfee0f475d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 439,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 18,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/oam/nad_oam.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_oam.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:引用了oam目录下的所有头文件\n-------------------------------------------------------*/\n#ifndef _NAD_OAM_H\n#define _NAD_OAM_H\n\n#include \"alarm/nad_ui_alarm.h\"\n#include \"alarm/nad_warning.h\"\n#include \"log/nad_ui_log.h\"\n\n//引用msg头文件\n#include \"../msg/nad_msg.h\"\n\n\n#endif\n"
},
{
"alpha_fraction": 0.6685460209846497,
"alphanum_fraction": 0.67487633228302,
"avg_line_length": 41.21294403076172,
"blob_id": "b8713e1e5d3a5833975e65eea5aa1a4be920e1f1",
"content_id": "0ac2309373ecb124849dc8b273f0b27d48182d5c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 20220,
"license_type": "no_license",
"max_line_length": 267,
"num_lines": 479,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/imrgb.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _IMRGB_H_\n#define _IMRGB_H_\n\n#include <stdio.h>\n#include <string.h>\n\n#include <opencv2/imgproc/imgproc.hpp>\n#include <opencv2/highgui/highgui.hpp>\n#include \"type.h\"\n\n#define VALMAX\t255\n#define\tRGB_R\t0\n#define\tRGB_G\t1\n#define\tRGB_B\t2\n\n#define LOBYTE(w) ((Uchar)(((Ulong)(w)) & 0xff))\n#define GetRValue(rgb) (LOBYTE(rgb))\n#define GetGValue(rgb) (LOBYTE(((Ushort)(rgb)) >> 8))\n#define GetBValue(rgb) (LOBYTE((rgb)>>16))\n\nenum ColorPlane\t{\tCP_RED = 0, CP_GREEN, CP_BLUE, CP_AVE, CP_ALL\t};\n\nclass IMRGB\n{\nprivate:\n int _width;// = 0;\n int _height;// = 0;\n //Uchar *_data;// = NULL;\t\t// bgrbgr...\n char *loadHeaderOfPPM(FILE *fp);\n BOOL loadFromBinaryPPM(FILE *fp);\n BOOL loadFromAsciiPPM(FILE *fp);\n char *loadHeaderOfPGM(FILE *fp);\n BOOL loadFromBinaryPGM(FILE *fp);\n BOOL loadFromAsciiPGM(FILE *fp);\npublic:\n //////////////////////////////////////////////////////////////////////////\n Uchar *_data;// = NULL;\t\t// bgrbgr...\n //////////////////////////////////////////////////////////////////////////\n IMRGB(void);\n IMRGB(int width,int height);\n IMRGB(int width,int height, Uchar *pucR, Uchar *pucG, Uchar *pucB);\n IMRGB(char *fname);\t\t//inline: load from PPM file\n IMRGB(FILE *fp);\t\t//inline: load from PPM file\n IMRGB(IMRGB &);\t\t\t// duplicator\n //IMRGB(LPBITMAPINFOHEADER pbmih);\n ~IMRGB();\n inline int width(void);\t\t//inline\n inline int height(void);\t\t//inline\n inline Uchar &r(int i,int j);//inline\n inline Uchar &g(int i,int j);//inline\n inline Uchar &b(int i,int j);//inline\n inline Uchar v(ColorPlane cp, int i, int j);\n inline Uchar *data(void);\n inline Uchar *dataOfLine(int h);\n BOOL loadFromPPM(char *fname);\n BOOL loadFromPPM(FILE *fp = stdin);\n BOOL loadFromPGM(char *fname);\n BOOL loadFromPGM(FILE *fp = stdin);\n BOOL saveAsPPM(FILE *fp = stdout);\n BOOL saveAsPPM(char *fname);\n BOOL saveAsPGM(int plane, FILE *fp = stdout);\n BOOL saveAsPGM(int plane, char *fname);\n BOOL 
saveAsXLS(FILE *fp = stdout);\n BOOL saveAsXLS(char *fname);\n BOOL loadFromBMP(char *fname);\n BOOL loadFromBMP(FILE *fp = stdin);\n BOOL loadFromBMR(char *, char *, int);//20031017 yamaga\n //\tIMRGB *loadFromBMR(char *fname, int);//20031015 sugaya\n BOOL loadFromBMR(FILE *fp);\n BOOL loadFromPNR(char *, char *, int);\n BOOL loadFromPNR(FILE *fp, Uchar *pFrameData);\n BOOL writeToPNR(FILE *fp);\n BOOL saveAsBMP24(FILE *fp = stdout);\n BOOL saveAsBMP24(char *fname);\n BOOL loadFromPNG(char *fname);\n BOOL loadFromJPG(char *fname);\n BOOL saveAsJPG(char *fname, int iQuality);\n IMRGB *flipHorizontal(void);\n IMRGB *flipVertical(void);\n IMRGB *rotatePi(void);\n IMRGB *rotateQuarter(void);\n IMRGB *rotateQuarter(IMRGB *dst);\n IMRGB *rotateThreeQuarters(void);\n IMRGB *rotateThreeQuarters(IMRGB *dst);\n\n void verticalSlide(int di);\n void horizontalSlide(int dj);\n\n IMRGB *shrink(int scale);\n IMRGB *shrink(IMRGB *dst, int scale);\n IMRGB *shrink(int vscale, int hscale);\n IMRGB *shrink(IMRGB *dst, int vscale, int hscale);\n\n IMRGB *enlarge(int scale);\n IMRGB *enlarge(IMRGB *dst, int scale);\n IMRGB *enlarge(int vscale, int hscale);\n IMRGB *enlarge(IMRGB *dst, int vscale, int hscale);\n\n IMRGB *resize(int iSrcTop, int iSrcLeft, int iSrcHeight, int iSrcWidth, int iDstHeight, int iDstWidth);\n IMRGB *partial(int uli, int ulj, int height, int width);\n IMRGB *partial(IMRGB *dst, int uli, int ulj, int height, int width);\n\n//#ifndef\tFOR_TMC\n//\t\tIMRGB *partial(AwBox *pBox);\n//\t\tIMRGB *partial(IMRGB *dst, AwBox *pBox);\n//#endif\tFOR_TMC\n\n IMRGB *upperHalf(void);\n IMRGB *upperHalf(IMRGB *dst);\n IMRGB *lowerHalf(void);\n IMRGB *lowerHalf(IMRGB *dst);\n\n IMRGB *upperHalfWithOriginalSize(void);\n IMRGB *upperHalfWithOriginalSize(IMRGB *dst);\n IMRGB *lowerHalfWithOriginalSize(void);\n IMRGB *lowerHalfWithOriginalSize(IMRGB *dst);\n\n Ulong *packToLong(void);\n Ulong *packToLong(Ulong *);\n Ulong *packToLongWithVerticalFlip(void);\n Ulong 
*packToLongWithVerticalFlip(Ulong *);\n Uchar *packToBGR(void);\n Uchar *packToBGR(Uchar *);\n Uchar *packTo8(void);\n Uchar *packTo8(Uchar *);\n Uchar *packTo8(int plane);\n Uchar *packTo8(Uchar *, int plane);\n inline void clear(void);\n inline void fillWhite(void);\n void putIMRGB(IMRGB *src, int i, int j);\n void putIMRGBWithBackGround(IMRGB *src, int uli, int ulj, int iBGred, int iBGgreen, int iBGblue);\n void putIMRGBAlpha(IMRGB *src, int uli, int ulj, double dAlpha);\n void putIMRGBAlpha(IMRGB *src, int uli, int ulj);\n void putBinaryAlpha(IMRGB *src, int uli, int ulj, double dAlpha, int iRed, int iGreen, int iBlue);\n void drawPoint(int i, int j, int r, int g, int b);\n void drawPoint(int i, int j, int r, int g, int b, double alpha);\n void drawPoint(int i, int j, int r, int g, int b, int scale);\n void drawPoint3(int i, int j, int r, int g, int b);\n void drawCross(int i, int j, int r, int g, int b, int scale);\n void drawCross(int i, int j, int r, int g, int b, int scale, int tick);\n void drawCross3(int i, int j, int r, int g, int b, int scale, int tick);\n void drawCrossX(int i, int j, int r, int g, int b, int scale);\n void drawPoint2(int i, int j, int r, int g, int b);\n void drawLine(int si, int sj, int ei, int ej, int r, int g, int b);\n void drawLine(int si, int sj, int ei, int ej, int r, int g, int b, double transparency);\n void drawThickLine(int si, int sj, int ei, int ej, int r, int g, int b, int iThickness);\n void drawLineWithClipping(int si, int sj, int ei, int ej, int rv, int gv, int bv, int imin, int imax, int jmin, int jmax);\n void drawLine2(int si, int sj, int ei, int ej, int r, int g, int b);\n void drawLine3(int si, int sj, int ei, int ej, int r, int g, int b);\n void drawDashLine(int si, int sj, int ei, int ej, int rv, int gv, int bv, int dash_step);\n void drawThickDashLine(int si, int sj, int ei, int ej, int rv, int gv, int bv, int iThickness, int dash_step);\n void drawLineWithFilledArrow(int si, int sj, int ei, int ej, int 
arrowLength, double arrowAngle, int r, int g, int b);\n void drawThickLineWithFilledArrow(int si, int sj, int ei, int ej, int arrowLength, double arrowAngle, int r, int g, int b, int iThickness);\n void drawRect(int uli, int ulj, int h, int w, int r, int g, int b);\n void drawThickRect(int uli, int ulj, int h, int w, int r, int g, int b, int iThickness);\n void drawRect(int uli, int ulj, int h, int w, COLORREF cr_v);\n void drawRectFill(int uli, int ulj, int h, int w, int r, int g, int b);\n void drawRectFill(int uli, int ulj, int h, int w, int r, int g, int b, double transparency);\n void drawRectFill(int uli, int ulj, int h, int w, COLORREF cr_v);\n void drawRectFill(int uli, int ulj, int h, int w, COLORREF cr_v, double transparency);\n void drawRectWithRotation(int uli, int ulj, int h, int w, int r, int g, int b, double rad);\n void drawRectFillWithRotation(int uli, int ulj, int h, int w, int r, int g, int b, double rad);\n//#ifndef\tFOR_TMC\n//\t\tvoid drawRect(AwBox *pBox, int r, int g, int b);\n//\t\tvoid drawRect(AwBox *pBox, COLORREF cr_v);\n//\t\tvoid drawRectFill(AwBox *pBox, int r, int g, int b);\n//\t\tvoid drawRectFill(AwBox *pBox, int r, int g, int b, double transparency);\n//\t\tvoid drawRectFill(AwBox *pBox, COLORREF cr_v);\n//\t\tvoid drawRectFill(AwBox *pBox, COLORREF cr_v, double transparency);\n//\t\tvoid drawRectWithRotation(AwBox *pBox, int r, int g, int b, double rad);\n//\t\tvoid drawRectFillWithRotation(AwBox *pBox, int r, int g, int b, double rad);\n//#endif\tFOR_TMC\n\n void drawPointWithPseudColor(int i, int j, double v, double min_v = 0.0, double max_v = 1.0);\n//\t\tvoid drawTriangularFill(int i0, int j0, double i1, double j1, double i2, double j2, int rv, int gv, int bv);\n void drawTriangularFill(int i0, int j0, int i1, int j1, int i2, int j2, int rv, int gv, int bv);\n void drawLinePolar(double rho, double theta, int r, int g, int b);\n void drawLinePolar2(double rho, double theta, int r, int g, int b);\n void drawLinePolar(int i0, 
int j0, double rho, double theta, int r, int g, int b);\n void drawLinePolar(int i0, int j0, double rho, double theta, int r, int g, int b, double transparency);\n void calcBoundaryPointOnLinePolar(int i0, int j0, double rho, double theta, int *piIb0, int *piJb0, int *piIb1, int *piJb1);\n void drawPoint(int i, int j, COLORREF cr_v);\n void drawPoint(int i, int j, COLORREF cr_v, int scale);\n void drawCross(int i, int j, COLORREF cr_v, int scale);\n void drawCross3(int i, int j, COLORREF cr_v, int scale);\n void drawCrossX(int i, int j, COLORREF cr_v, int scale);\n void drawPoint2(int i, int j, COLORREF cr_v);\n void drawLine(int si, int sj, int ei, int ej, COLORREF cr_v);\n void drawLineWithClipping(int si, int sj, int ei, int ej, COLORREF cr_v, int imin, int imax, int jmin, int jmax);\n void drawLine2(int si, int sj, int ei, int ej, COLORREF cr_v);\n void drawDashLine(int si, int sj, int ei, int ej, COLORREF cr_v, int dash_step);\n\n void drawEllipse(int i0, int j0, int h, int w, double phi, double ts, double te, int r, int g, int b);\n void drawEllipse(int i0, int j0, int h, int w, double phi, int r, int g, int b);\n void drawEllipseFill(int i0, int j0, int h, int w, double phi, double ts, double te, int r, int g, int b);\n void drawEllipseFill(int i0, int j0, int h, int w, double phi, int r, int g, int b);\n void drawPie(int i0, int j0, int h, int w, double phi, double ts, double te, int r, int g, int b);\n\n//#ifndef\tFOR_TMC\n//\t\tvoid drawPolyLine(AwPoints *pPoints, int r, int g, int b);\n//\t\tvoid drawPolygone(AwPoints *pPoints, int r, int g, int b);\n//\t\tvoid drawPolygone(AwPoints *pPoints, int r, int g, int b, int iThickness);\n//\t\tvoid drawPolygoneFill(AwPoints *pPoints, int r, int g, int b);\n//#endif\tFOR_TMC\n\n IMRGB *W720toW640(void);\n IMRGB *W720toW640(IMRGB *dst);\n void putIMRGBEachPlane(IMRGB *src, int uli, int ulj, BOOL fR, BOOL fG, BOOL fB);\n void convertToGray(ColorPlane cp);\n\n void convertToGrayScaleImage(int plane);\n void 
convertToGrayScaleImage(int plane, IMRGB *dst);\n\n IMRGB *sobel(void);\n IMRGB *sobel(IMRGB *pDst);\n Uchar medianValue(ColorPlane cp, int i, int j, int size = 3);\n void median(ColorPlane cp);\n void median(ColorPlane cp, IMRGB *dst);\n void median(void);\n void median(IMRGB *dst);\n Uchar snnfValue(ColorPlane cp, int i, int j);\n void snnf(ColorPlane cp);\n void snnf(ColorPlane cp, IMRGB *dst);\n void snnf(void);\n void snnf(IMRGB *dst);\n void snnfSelf(ColorPlane cp);\n void snnfSelf(void);\n void toEven(void);\n void toOdd(void);\n\n IplImage *toIplImage8(ColorPlane cp);\n IplImage *toIplImage8(ColorPlane cp, IplImage *);\n IplImage *toIplImage(void);\n IplImage *toIplImage(IplImage *);\n\n void inverse(void);\n\n BOOL histogram(int iBinNumber, int *r_hist, int *g_hist, int *b_hist);\n\n Ulong sad(int iUpper, int iLeft, int iHeight, int iWidth, IMRGB *pImTarget, int iTargetUpper, int iTargetLeft, ColorPlane cp);\n long searchMinimumSAD(int iUpper, int iLeft, int iHeight, int iWidth, IMRGB *pImTarget, int iTargetUpper, int iTargetLeft, int iTargetHeight, int iTargetWidth, int *piIpos, int *piJpos, ColorPlane cp);\n\n BOOL difference(IMRGB *pImSubtractor);\n BOOL binarize(Uchar th);\n BOOL binarize(Uchar th, IMRGB *dst);\n void convertToPseudColor2(void);\n void convertToPseudColor2(IMRGB *dst);\n void expandDynamicRange(int iMinV, int iMaxV);\n void expandDynamicRange(IMRGB *dst, int iMinV, int iMaxV);\n\n void edgeHistogram(ColorPlane cp, int iTop, int iLeft, int iBottom, int iRight, double *pdHistV, double *pdHistH);\n void drawHistogram(double *pdHistV, double *pdHistH, int iSize, int r, int g, int b);\n\n void calcStatistic(ColorPlane cp, int iTop, int iLeft, int iBottom, int iRight, int *piAverage, int *piStdDev);\n void calcStatisticHorizontalDifferential(ColorPlane cp, int iTop, int iLeft, int iBottom, int iRight, int *piAverageDif, int *piStdDevDif);\n double calcIntensityAverage(ColorPlane cp, int iIcenter, int iJcenter, int iHeight, int iWidth);\n 
BOOL calcIntensityAverageWithFOE(ColorPlane cp, int iIfoe, int iJfoe, int iIcenter, int iJcenter, int iHeight, int iWidth, double *pdLeftAverage, double *pdRightAverage);\n BOOL calcIntensityVarianceWithFOE(ColorPlane cp, int iIfoe, int iJfoe, int iIcenter, int iJcenter, int iHeight, int iWidth, double *pdLeftVariance, double *pdRightVariance);\n double calcIntensityAverageOfHorizontalLine(ColorPlane cp, int iIcenter, int iHeight, int iWidth, double *pdAverage);\n double calcIntensityAverageRatioOfHorizontalLineWithFOE(ColorPlane cp, int iIfoe, int iJfoe, int iIcenter, int iHeight, int iWidth, double *pdRatio);\n double calcIntensityVarianceRatioOfHorizontalLineWithFOE(ColorPlane cp, int iIfoe, int iJfoe, int iIcenter, int iHeight, int iWidth, double *pdRatio);\n};\n//#ifndef\tFOR_TMC\n//extern IMRGB *makeIMRGBfromIplImage8(IplImage *pIplImage, IMRGB *pDst);\n//extern IMRGB *makeIMRGBfromIplImage8(IplImage *pIplImage);\n//extern IMRGB *makeIMRGBfromIplImage(IplImage *pIplImage, IMRGB *pDst);\n//extern IMRGB *makeIMRGBfromIplImage(IplImage *pIplImage);\n//#endif\n\nextern double calcNormalizedCorrelation( IMRGB *pImrgbTemp, IMRGB *pImrgbDst, int iTempTop, int iTempLeft, int iTempHeight, int TempiWidth,\t int iDstTop, int iDstLeft);\nextern BOOL searchByNormalizedCorrelation( IMRGB *pImrgbTemp, IMRGB *pImrgbSearch, int iTempTop, int iTempLeft, int iTempHeight, int iTempWidth, int iSearchTop, int iSearchLeft, int iSearchHeight, int iSearchWidth, int *piMaxIdxV, int *piMaxIdxH, double *pdMaxValue);\nextern double calcSAD( IMRGB *pImrgbTemp, IMRGB *pImrgbDst, int iTempTop, int iTempLeft, int iTempHeight, int TempiWidth,\t int iDstTop, int iDstLeft);\nextern BOOL searchBySAD( IMRGB *pImrgbTemp, IMRGB *pImrgbSearch, int iTempTop, int iTempLeft, int iTempHeight, int iTempWidth, int iSearchTop, int iSearchLeft, int iSearchHeight, int iSearchWidth, int *piMaxIdxV, int *piMaxIdxH, double *pdMaxValue);\nextern double calcSSD( IMRGB *pImrgbTemp, IMRGB *pImrgbDst, int iTempTop, 
int iTempLeft, int iTempHeight, int TempiWidth,\t int iDstTop, int iDstLeft);\nextern BOOL searchBySSD( IMRGB *pImrgbTemp, IMRGB *pImrgbSearch, int iTempTop, int iTempLeft, int iTempHeight, int iTempWidth, int iSearchTop, int iSearchLeft, int iSearchHeight, int iSearchWidth, int *piMaxIdxV, int *piMaxIdxH, double *pdMaxValue);\n\nextern IMRGB *calcSubAbs(IMRGB *pIMRGBminuend, IMRGB *pIMRGBsubtracter, IMRGB *pIMRGBresult);\nextern IMRGB *calcSubAbs(IMRGB *pIMRGBminuend, IMRGB *pIMRGBsubtracter);\n\ninline\nIMRGB::IMRGB(void):\n _width(0),\n _height(0),\n _data(NULL)\n{\n}\n\ninline\nIMRGB::IMRGB(char *fname):\n _width(0),\n _height(0),\n _data(NULL)\n{\n loadFromPPM(fname);\n}\n\ninline\nIMRGB::IMRGB(FILE *fp):\n _width(0),\n _height(0),\n _data(NULL)\n{\n loadFromPPM(fp);\n}\n\ninline int\nIMRGB::width(void)\n{\n return(_width);\n}\n\ninline int\nIMRGB::height(void)\n{\n return(_height);\n}\n\ninline Uchar &\nIMRGB::r(int i, int j)\n{\n return(_data[(i * width() + j) * 3 + 2]);\n}\ninline Uchar &\nIMRGB::g(int i, int j)\n{\n return(_data[(i * width() + j) * 3 + 1]);\n}\ninline Uchar &\nIMRGB::b(int i, int j)\n{\n return(_data[(i * width() + j) * 3 + 0]);\n}\ninline Uchar\nIMRGB::v(ColorPlane cp, int i, int j)\n{\n switch(cp)\n {\n case\tCP_ALL:\n return (Uchar)(((Ulong)r(i,j) + (Ulong)g(i,j) + (Ulong)b(i,j)) / 3);\n case\tCP_RED:\n return r(i,j);\n case\tCP_GREEN:\n return g(i,j);\n case\tCP_BLUE:\n return b(i,j);\n case CP_AVE:\n return r(i,j);\n }\n return 0;\n}\n\ninline Uchar *\nIMRGB::data(void)\n{\n return _data;\n}\n\ninline Uchar *\nIMRGB::dataOfLine(int h)\n{\n return(data() + width() * h * 3);\n}\n\ninline void\nIMRGB::clear(void)\n{\n memset(_data, 0, height() * width() * 3);\n}\n\ninline void\nIMRGB::fillWhite(void)\n{\n memset(_data, 255, height() * width() * 3);\n}\n\ninline void IMRGB::drawPoint(int i, int j, COLORREF cr_v)\n{\n drawPoint(i, j, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v));\n}\ninline void IMRGB::drawPoint(int i, int j, COLORREF 
cr_v, int scale)\n{\n drawPoint(i, j, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v), scale);\n}\ninline void IMRGB::drawCross(int i, int j, COLORREF cr_v, int scale)\n{\n drawCross(i, j, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v), scale);\n}\ninline void IMRGB::drawCrossX(int i, int j, COLORREF cr_v, int scale)\n{\n drawCrossX(i, j, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v), scale);\n}\ninline void IMRGB::drawPoint2(int i, int j, COLORREF cr_v)\n{\n drawPoint2(i, j, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v));\n}\ninline void IMRGB::drawLine(int si, int sj, int ei, int ej, COLORREF cr_v)\n{\n drawLine(si, sj, ei, ej, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v));\n}\ninline void IMRGB::drawLineWithClipping(int si, int sj, int ei, int ej, COLORREF cr_v, int imin, int imax, int jmin, int jmax)\n{\n drawLineWithClipping(si, sj, ei, ej, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v), imin, imax, jmin, jmax);\n}\ninline void IMRGB::drawLine2(int si, int sj, int ei, int ej, COLORREF cr_v)\n{\n drawLine2(si, sj, ei, ej, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v));\n}\ninline void IMRGB::drawDashLine(int si, int sj, int ei, int ej, COLORREF cr_v, int dash_step)\n{\n drawDashLine(si, sj, ei, ej, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v), dash_step);\n}\ninline void IMRGB::drawRect(int uli, int ulj, int h, int w, COLORREF cr_v)\n{\n drawRect(uli, ulj, h, w, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v));\n}\ninline void IMRGB::drawRectFill(int uli, int ulj, int h, int w, COLORREF cr_v)\n{\n drawRectFill(uli, ulj, h, w, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v));\n}\ninline void IMRGB::drawRectFill(int uli, int ulj, int h, int w, COLORREF cr_v, double transparency)\n{\n drawRectFill(uli, ulj, h, w, GetRValue(cr_v), GetGValue(cr_v), GetBValue(cr_v), transparency);\n}\n//#ifndef\tFOR_TMC\n//inline void IMRGB::drawRect(AwBox *pBox, int r, int g, int b)\n//{\n//\tif(pBox == NULL)\treturn;\n//\tdrawRect(pBox->getTop(), 
pBox->getLeft(), pBox->getHeight(), pBox->getWidth(), r, g, b);\n//}\n//inline void IMRGB::drawRect(AwBox *pBox, COLORREF cr_v)\n//{\n//\tif(pBox == NULL)\treturn;\n//\tdrawRect(pBox->getTop(), pBox->getLeft(), pBox->getHeight(), pBox->getWidth(), cr_v);\n//}\n//inline void IMRGB::drawRectFill(AwBox *pBox, int r, int g, int b)\n//{\n//\tif(pBox == NULL)\treturn;\n//\tdrawRectFill(pBox->getTop(), pBox->getLeft(), pBox->getHeight(), pBox->getWidth(), r, g, b);\n//}\n//inline void IMRGB::drawRectFill(AwBox *pBox, int r, int g, int b, double transparency)\n//{\n//\tif(pBox == NULL)\treturn;\n//\tdrawRectFill(pBox->getTop(), pBox->getLeft(), pBox->getHeight(), pBox->getWidth(), r, g, b, transparency);\n//}\n//inline void IMRGB::drawRectFill(AwBox *pBox, COLORREF cr_v)\n//{\n//\tif(pBox == NULL)\treturn;\n//\tdrawRectFill(pBox->getTop(), pBox->getLeft(), pBox->getHeight(), pBox->getWidth(), cr_v);\n//}\n//inline void IMRGB::drawRectFill(AwBox *pBox, COLORREF cr_v, double transparency)\n//{\n//\tif(pBox == NULL)\treturn;\n//\tdrawRectFill(pBox->getTop(), pBox->getLeft(), pBox->getHeight(), pBox->getWidth(), cr_v, transparency);\n//}\n//inline void IMRGB::drawRectWithRotation(AwBox *pBox, int r, int g, int b, double rad)\n//{\n//\tif(pBox == NULL)\treturn;\n//\tdrawRectWithRotation(pBox->getTop(), pBox->getLeft(), pBox->getHeight(), pBox->getWidth(), r, g, b, rad);\n//}\n//inline void IMRGB::drawRectFillWithRotation(AwBox *pBox, int r, int g, int b, double rad)\n//{\n//\tif(pBox == NULL)\treturn;\n//\tdrawRectFillWithRotation(pBox->getTop(), pBox->getLeft(), pBox->getHeight(), pBox->getWidth(), r, g, b, rad);\n//}\n//\n//inline IMRGB *IMRGB::partial(AwBox *pBox)\n//{\n//\tif(pBox == NULL)\treturn NULL;\n//\treturn partial(pBox->getTop(), pBox->getLeft(), pBox->getHeight(), pBox->getWidth());\n//}\n//inline IMRGB *IMRGB::partial(IMRGB *dst, AwBox *pBox)\n//{\n//\tif(pBox == NULL)\treturn NULL;\n//\treturn partial(dst, pBox->getTop(), pBox->getLeft(), pBox->getHeight(), 
pBox->getWidth());\n//}\n//#endif\tFOR_TMC\n\nextern void sortUchar(Uchar *a_ucBuf, int iNumber);\n#endif\n"
},
{
"alpha_fraction": 0.5275229215621948,
"alphanum_fraction": 0.5458715558052063,
"avg_line_length": 27.09677505493164,
"blob_id": "546b2171bde5201a570060efae5c4996141b38da",
"content_id": "c41ad1e4b94d3cfd9248da87a6cb80a1e9504651",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 946,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 31,
"path": "/athena/core/arm/Common/include/route/heading.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <math.h>\n#include <vector>\n\n//转化头指向\ndouble cau_derta_heading(\n double current_heading,\n double head_c);\n//求取点到直线的距离;带方向\ndouble calculate_sign_and_value_offset(\n double x, double y,\n double x_a, double y_a, double x_b, double y_b);\n// 求取相关点的头指向。\nint get_heading_h(double x1, double y1,\n double x2, double y2, double& h2,\n double& h_r, int num);\n\n// 求取相关点的头指向。\nint get_heading(double x1, double y1,\n double x2, double y2, double& h2);\n\nvoid derta_heading(double h1,\n double h2,\n double& derta_h);\n\nint get_steering_angle_h(double h1, double h2,\n double s, double& st, double& st_r);\n\nint get_steering_angle(double h1, double h2,\n double s, double& st);\n\n"
},
{
"alpha_fraction": 0.528265118598938,
"alphanum_fraction": 0.5575048923492432,
"avg_line_length": 11.512195587158203,
"blob_id": "ca2261a4451b8dc8ba58137569866065edf66348",
"content_id": "4cdecebfeb79cb8586be11527e1849e2bc23e952",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 519,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 41,
"path": "/athena/core/x86/Control/include/scheduler.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file Scheduler.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef SCHEDULER_H_\n#define SCHEDULER_H_\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n\n/**\n * @class Scheduler\n *\n * @brief 调节器.\n */\nclass Scheduler\n{\n public:\n Scheduler()\n {\n speed_ = 0.0;\n ratio_ = 0.0;\n }\n ~Scheduler() = default;\n double speed_;\n double ratio_;\n};\n}\n}\n\n#endif // CHASSIS_H_\n"
},
{
"alpha_fraction": 0.5227775573730469,
"alphanum_fraction": 0.5448916554450989,
"avg_line_length": 16.936508178710938,
"blob_id": "9541a7a8eaecec91dcb7d7b0b67d2613b9990fa4",
"content_id": "11703b59ebd08d58d79b92202604d9293a0525f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2338,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 126,
"path": "/athena/core/arm/Control/include/common/map_matching/navi_point.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#pragma once\n\n#include <vector>\n#include <iostream>\n\nusing namespace std;\n\n\n//tips 删除没用到的参数\nclass navi_point\n{\n\npublic:\n double position_x_; // 坐标.x\n double position_y_; // 坐标.y\n double position_z_; // 坐标.z\n\n double lat_;\n double lon_;\n double height_;\n\n double speed_;\n double lateral_speed_;\n double longitudinal_speed_;\n double down_speed_;\n\n double roll_;\n double pitch_;\n double heading_;\n\n double accelerataion_;\n double lateral_accelerate_;\n double longitudinal_accelerate_;\n double down_accelerate_;\n\n double roll_speed_;\n double pitch_speed_;\n double heading_speed_;\n\n double steering_angle_;\n double steering_angle_speed_;\n\n double speed_desired_Uxs_;\n double curvature_;\n double acceleration_desired_Axs_;\n\npublic:\n int point_no_;\n double pos_gps_time_; // get_current_time\n int32_t cycle_tag_; // 整周计数;整体增加360×cycle_tag\n\n double s_;\n double r_;\n double lateral_offset_;\n double k_s_;\n double d_lateral_offset_;\n double d2_lateral_offset_;\n\n double width_; //车道宽度\n int32_t type_;\n int64_t lane_id_;\n int32_t left_lane_num_;\n int32_t right_lane_num_;\n\n int control_mark_;\n double suggest_kp_;\n int8_t p_g_;\n double tar_speed_;\t\t//该点的档位\n double p_h_; //航向角\n double p_k_;\t\t\t\t//该点曲率\npublic:\n\n\n navi_point()\n {\n position_x_ = 0;\n position_y_ = 0;\n position_z_ = 0;\n\n lat_ = 0;\n lon_ = 0;\n height_ = 0;\n\n speed_ = 0;\n lateral_speed_ = 0;\n longitudinal_speed_ = 0;\n down_speed_ = 0;\n\n roll_ = 0;\n pitch_ = 0;\n heading_ = 0;\n\n accelerataion_ = 0;\n lateral_accelerate_ = 0;\n longitudinal_accelerate_ = 0;\n down_accelerate_ = 0;\n\n roll_speed_ = 0;\n pitch_speed_ = 0;\n heading_speed_ = 0;\n\n steering_angle_ = 0;\n steering_angle_speed_ = 0;\n\n speed_desired_Uxs_ = 0;\n curvature_ = 0;\n acceleration_desired_Axs_ = 0;\n\n point_no_ = 0;\n s_ = 0;\n r_ = 0;\n lateral_offset_ = 0;\n d_lateral_offset_ = 0;\n d2_lateral_offset_ = 0;\n k_s_ = 0;\n\n control_mark_ = 0;\n suggest_kp_ = 
2;\n }\n\n ~navi_point()\n {\n\n\n }\n};\n\n"
},
{
"alpha_fraction": 0.7085057497024536,
"alphanum_fraction": 0.7094252705574036,
"avg_line_length": 32.984375,
"blob_id": "7019a7cf180d6772407c33d7072ef685a5ad058d",
"content_id": "b6b31ea2fb2e9b63118a7268add36c822b6fc058",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2195,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 64,
"path": "/athena/examples/LCM/Singlecar/control/common/timer_app.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"local_timer.h\"\n#include <boost/bind.hpp>\n#include <vector>\n#include \"local_timer.h\"\n#include <vector>\n\n using namespace std;\n\ntemplate<class MessageHandlerClass>\nclass TimerApp\n{\n typedef void (MessageHandlerClass::*HandlerMethod)();\n\n typedef struct{\n int interval_;\n HandlerMethod handler_method_; //回调函数指针\n MessageHandlerClass* handler_; //调用对象\n local_timer *timer;\n }TimerBase;\n\n public:\n static void AddTimer(int interval,HandlerMethod handler_method,MessageHandlerClass *handler);\n\n private:\n static std::vector<TimerBase> timer_vec_;\n static void OnTimer(int index);\n static void run();\n};\n\ntemplate<class MessageHandlerClass> std::vector<typename TimerApp<MessageHandlerClass>::TimerBase> TimerApp<MessageHandlerClass>::timer_vec_;\n//template<class MessageHandlerClass> int TimerApp<MessageHandlerClass>::interval_ = 0;\n//template<class MessageHandlerClass> typename TimerApp<MessageHandlerClass>::HandlerMethod TimerApp<MessageHandlerClass>::handler_method_ = NULL;\n//template<class MessageHandlerClass> MessageHandlerClass * TimerApp<MessageHandlerClass>::handler_ = NULL;\n\n template<class MessageHandlerClass>\n void TimerApp<MessageHandlerClass>::OnTimer(int index)\n {\n HandlerMethod handler_method_= timer_vec_[index].handler_method_;\n MessageHandlerClass *handler = timer_vec_[index].handler_;\n (handler->*handler_method_)();\n }\n\ntemplate<class MessageHandlerClass>\nvoid TimerApp<MessageHandlerClass>::AddTimer(int interval,HandlerMethod handler_method,MessageHandlerClass *handler)\n{\n TimerBase timerbase;\n timerbase.interval_ = interval;\n timerbase.handler_ = handler;\n timerbase.handler_method_ = handler_method;\n timerbase.timer = new local_timer();\n timer_vec_.push_back(timerbase);\n run();\n}\n\ntemplate<class MessageHandlerClass>\nvoid TimerApp<MessageHandlerClass>::run()\n{\n int index;\n for(auto it = timer_vec_.begin();it != timer_vec_.end();++it)\n {\n index = it - timer_vec_.begin();\n it -> 
timer -> StartTimer(it -> interval_,std::bind(OnTimer,std::placeholders::_1),index);\n }\n}\n"
},
{
"alpha_fraction": 0.577225387096405,
"alphanum_fraction": 0.5835989117622375,
"avg_line_length": 25.011049270629883,
"blob_id": "cd89547fa719a9567927a8206a2888ae273aaeb5",
"content_id": "22070b39223664af3ef4189683d56c6c42371e93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5291,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 181,
"path": "/athena/core/x86/Camera/lane_detect/include/Matrix/Matrix.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#ifndef __MATRIX_H__\n#define __MATRIX_H__\n\n#include <iostream>\n#include <fstream>\n#include <sstream>\n#include <vector>\n#include <string>\n\nusing std::vector;\nusing std::string;\nusing std::cout;\nusing std::cin;\nusing std::istream;\nusing std::ostream;\n\n// 任意类型矩阵类\ntemplate <typename Object>\nclass MATRIX\n{\npublic:\n\texplicit MATRIX() : array(0) {}\n\n\tMATRIX(int rows, int cols) :array(rows)\n\t{\n\t\tfor (int i = 0; i < rows; ++i)\n\t\t{\n\t\t\tarray[i].resize(cols);\n\t\t}\n\t}\n\n\tMATRIX(const MATRIX<Object>& m) { *this = m; }\n\n\tvoid resize(int rows, int cols); // 改变当前矩阵大小\n\tbool push_back(const vector<Object>& v); // 在矩阵末尾添加一行数据\n\tvoid swap_row(int row1, int row2); // 将换两行的数据\n\n\tint rows() const { return array.size(); }\n\tint cols() const { return rows() ? (array[0].size()) : 0; }\n\tbool empty() const { return rows() == 0; } // 是否为空\n\tbool square() const { return (!(empty()) && rows() == cols()); } // 是否为方阵\n\n\n\tconst vector<Object>& operator[](int row) const { return array[row]; } //[]操作符重载 \n\tvector<Object>& operator[](int row) { return array[row]; }\n\nprotected:\n\tvector< vector<Object> > array;\n};\n\n// 改变当前矩阵大小\ntemplate <typename Object>\nvoid MATRIX<Object>::resize(int rows, int cols)\n{\n\tint rs = this->rows();\n\tint cs = this->cols();\n\n\tif (rows == rs && cols == cs)\n\t{\n\t\treturn;\n\t}\n\telse if (rows == rs && cols != cs)\n\t{\n\t\tfor (int i = 0; i < rows; ++i)\n\t\t{\n\t\t\tarray[i].resize(cols);\n\t\t}\n\t}\n\telse if (rows != rs && cols == cs)\n\t{\n\t\tarray.resize(rows);\n\t\tfor (int i = rs; i < rows; ++i)\n\t\t{\n\t\t\tarray[i].resize(cols);\n\t\t}\n\t}\n\telse\n\t{\n\t\tarray.resize(rows);\n\t\tfor (int i = 0; i < rows; ++i)\n\t\t{\n\t\t\tarray[i].resize(cols);\n\t\t}\n\t}\n}\n\n// 在矩阵末尾添加一行\ntemplate <typename Object>\nbool MATRIX<Object>::push_back(const vector<Object>& v)\n{\n\tif (rows() == 0 || cols() == (int)v.size())\n\t{\n\t\tarray.push_back(v);\n\t}\n\telse\n\t{\n\t\treturn 
false;\n\t}\n\n\treturn true;\n}\n\n// 将换两行\ntemplate <typename Object>\nvoid MATRIX<Object>::swap_row(int row1, int row2)\n{\n\tif (row1 != row2 && row1 >= 0 &&\n\t\trow1 < rows() && row2 >= 0 && row2 < rows())\n\t{\n\t\tvector<Object>& v1 = array[row1];\n\t\tvector<Object>& v2 = array[row2];\n\t\tvector<Object> tmp = v1;\n\t\tv1 = v2;\n\t\tv2 = tmp;\n\t}\n}\n\n// 矩阵转置\ntemplate <typename Object>\nconst MATRIX<Object> trans(const MATRIX<Object>& m)\n{\n\tMATRIX<Object> ret;\n\tif (m.empty()) return ret;\n\n\tint row = m.cols();\n\tint col = m.rows();\n\tret.resize(row, col);\n\n\tfor (int i = 0; i < row; ++i)\n\t{\n\t\tfor (int j = 0; j < col; ++j)\n\t\t{\n\t\t\tret[i][j] = m[j][i];\n\t\t}\n\t}\n\n\treturn ret;\n}\n\n//////////////////////////////////////////////////////////\n// double类型矩阵类,用于科学计算\n// 继承自MATRIX类\n// 实现常用操作符重载,并实现计算矩阵的行列式、逆以及LU分解\nclass Matrix :public MATRIX<double>\n{\npublic:\n\tMatrix() :MATRIX<double>() {}\n\tMatrix(int c, int r) :MATRIX<double>(c, r) {}\n\tMatrix(const Matrix& m) { *this = m; }\n\n\tconst Matrix& operator+=(const Matrix& m);\n\tconst Matrix& operator-=(const Matrix& m);\n\tconst Matrix& operator*=(const Matrix& m);\n\tconst Matrix& operator/=(const Matrix& m);\n};\n\nbool operator==(const Matrix& lhs, const Matrix& rhs); // 重载操作符==\nbool operator!=(const Matrix& lhs, const Matrix& rhs); // 重载操作符!=\nconst Matrix operator+(const Matrix& lhs, const Matrix& rhs); // 重载操作符+\nconst Matrix operator-(const Matrix& lhs, const Matrix& rhs); // 重载操作符-\nconst Matrix operator*(const Matrix& lhs, const Matrix& rhs); // 重载操作符*\nconst Matrix operator/(const Matrix& lhs, const Matrix& rhs); // 重载操作符/\nconst double det(const Matrix& m); // 计算行列式\nconst double det(const Matrix& m, int start, int end); // 计算子矩阵行列式\nconst Matrix abs(const Matrix& m); // 计算所有元素的绝对值\nconst double max(const Matrix& m); // 所有元素的最大值\nconst double max(const Matrix& m, int& row, int& col); // 所有元素中的最大值及其下标\nconst double min(const Matrix& m); // 所有元素的最小值\nconst double 
min(const Matrix& m, int& row, int& col); // 所有元素的最小值及其下标\nconst Matrix trans(const Matrix& m); // 返回转置矩阵\nconst Matrix submatrix(const Matrix& m, int rb, int re, int cb, int ce); // 返回子矩阵\nconst Matrix inverse(const Matrix& m); // 计算逆矩阵\nconst Matrix LU(const Matrix& m); // 计算方阵的LU分解\nconst Matrix readMatrix(istream& in = std::cin); // 从指定输入流读入矩阵\nconst Matrix readMatrix(string file); // 从文本文件读入矩阵\nconst Matrix loadMatrix(string file); // 从二进制文件读取矩阵\nvoid printMatrix(const Matrix& m, ostream& out = std::cout); // 从指定输出流打印矩阵\nvoid printMatrix(const Matrix& m, string file); // 将矩阵输出到文本文件\nvoid saveMatrix(const Matrix& m, string file); // 将矩阵保存为二进制文件\n\n#endif"
},
{
"alpha_fraction": 0.3817863464355469,
"alphanum_fraction": 0.3817863464355469,
"avg_line_length": 26.190475463867188,
"blob_id": "064b1bf9297b14834247fc2e2a02279fe42f5ff3",
"content_id": "8955cf9270cbe51a46ada19e99cabec65be62927",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1142,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 42,
"path": "/athena/core/arm/Planning/directoryList.md",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "|-- arm\n |-- include\n | |-- collision_check\n | | |-- collision_check.h\n | |-- common\n | | |-- LocalGeographicCS.hpp\n | | |-- car_state.h\n | | |-- color_util.h\n | | |-- convert_coordinates.hpp\n | | |-- cs.h\n | | |-- enum_list.h\n | | |-- math_util.h\n | | |-- navi_point.h\n | | |-- path.h\n | | |-- path_tools.h\n | | |-- point.h\n | | |-- rect.h\n | |-- map_matching\n | | |-- map_matching.h\n | |-- park\n | | |-- park.h\n | |-- planning\n | | |-- planning.h\n | | |-- planning_output.h\n | | |-- planning_param.h\n | | |-- route_data.h\n | |-- spline\n | | |-- math_tools.h\n | | |-- quartic_spline.h\n | | |-- quintic_spline.h\n | | |-- spline.h\n | |-- trajectory\n | | |-- trajectory.h\n | | |-- trajectory_sets.h\n | |-- vehicle_dynamic\n | |-- cau_heading_steering.h\n | |-- circle.h\n | |-- heading.h\n | |-- nearest_point_on_spline.h\n | |-- steering_angle.h\n |-- lib\n |-- libplanning.so\n"
},
{
"alpha_fraction": 0.7243816256523132,
"alphanum_fraction": 0.7514722943305969,
"avg_line_length": 20.769229888916016,
"blob_id": "9cf91a7c172faa542e68c4e00af5dbe5d0ec424f",
"content_id": "745c2ce876ada5fbe11d50e4e4c1ce4ac683ec5a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": true,
"language": "Shell",
"length_bytes": 849,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 39,
"path": "/third_party/install.sh",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\nset -e\nbasepath=`dirname $(pwd)/$0`\ncd $HOME\ninstall_dir=\"$basepath\"\ncd $install_dir\nunzip lcm-1.3.1.zip\ncd $install_dir/lcm-1.3.1\nsudo cp /usr/share/misc/config* .\nsudo ./configure\nsudo make -j4\nsudo make install\t\necho \"lcm successful completion\"\ncd $install_dir\nunzip glog-master.zip\ncd $install_dir/glog-master\nsudo cp /usr/share/misc/config* .\nsudo ./configure\nsudo make -j4\nsudo make install\necho \"glog successful completion\"\ncd $install_dir\ntar -xzvf libsodium-1.0.3.tar.gz\ncd $install_dir/libsodium-1.0.3\nsudo cp /usr/share/misc/config* .\nsudo ./configure\nsudo make -j4\nsudo make install\necho \"libsodium successful completion\"\ncd $install_dir\ntar -xzvf zeromq-4.1.2.tar.gz\ncd $install_dir/zeromq-4.1.2\nsudo ./configure\nsudo make -j4\nsudo make install\ncd /usr/local/lib\nsudo ldconfig\ncd $install_dir\necho \"zmq successful completion\"\n"
},
{
"alpha_fraction": 0.4590336084365845,
"alphanum_fraction": 0.48949578404426575,
"avg_line_length": 13.208954811096191,
"blob_id": "ae3cf4b3cddec6a6bfd5628ba2a6657aa5de6c87",
"content_id": "941bc9aa49a540d3b9017db58c5566f09ea5e33e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1040,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 67,
"path": "/athena/core/x86/Control/include/nav_points.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file nav_points.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef NAVPOINTS_H_\n#define NAVPOINTS_H_\n\n#include <iostream>\n\nusing namespace std;\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class NavPoints\n *\n * @brief motion Trajectory points.\n */\nclass NavPoints\n{\n public:\n NavPoints()\n {\n gps_time_ = 0.0;\n p_x_ = 0.0;\n p_y_ = 0.0;\n s_ = 0.0;\n p_v_ = 0.0;\n p_a_ = 0.0;\n p_h_ = 0.0;\n p_k_ = 0.0;\n p_g_ = 0;\n }\n ~NavPoints() = default;\n\n ///GPS 时间\n double gps_time_;\n ///x值\n double p_x_;\n ///y值\n double p_y_;\n ///里程值\n double s_;\n ///到达该点的速度\n double p_v_;\n ///到达该点的加速度\n double p_a_;\n ///到达该点的航向角\n double p_h_;\n ///到达该点的曲率\n double p_k_;\n ///到达该点的档位\n int8_t p_g_;\n};\n}\n}\n\n#endif // NAVPOINTS_H_\n"
},
{
"alpha_fraction": 0.5963756442070007,
"alphanum_fraction": 0.6285008192062378,
"avg_line_length": 21.481481552124023,
"blob_id": "a37dac73ab93620fcb58a028813f62e6d3cdd547",
"content_id": "429c0f3b9b526436ba0b0ad6bfce42fecb072d93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1402,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 54,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/oam/alarm/nad_ui_alarm.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_ui_alarm.h\n * 创建者:张毅00151602\n * 时 间:2016-03-28\n * 描 述:向OCT和VUI发告警\n-------------------------------------------------------*/\n#ifndef _NAD_UI_ALARM_H\n#define _NAD_UI_ALARM_H\n\n#include \"nad_base.h\"\n#include \"../../msg/nad_msg.h\"\n\n\n\n\n\n\n//默认的告警间隔是10秒\n#define ALARM_TTL 10000\n\n#ifdef _NAD_CSU_\n\n#include \"../../../csu/csu_planning/csu_zmq.h\"\n\nextern csu_zmq *g_csu_zmq;\n\n//向oct发告警,封装了cu_alarm_report,返回自动分配的alarm_no\nvoid alarm_report_to_oct(string oct_name, int32_t alarm_level, string alarm, string mask = \"\", int64_t ttl = ALARM_TTL);\n\n#endif\n\n#ifdef _NAD_RSU_\n\n#include \"../../../rsu/rsu_planning/rsu_zmq.h\"\n\nextern rsu_zmq *g_rsu_zmq;\n\n//向oct发告警,封装了rc_alarm_report,返回自动分配的alarm_no\nvoid alarm_report_to_oct(int32_t alarm_level, string alarm, string mask = \"\", int64_t ttl = ALARM_TTL);\n\n//向vui发告警,封装了ro_alarm_report,返回自动分配的alarm_no\nvoid alarm_report_to_vui(string obu_name, int32_t alarm_level, string alarm, string mask = \"\", int64_t ttl = ALARM_TTL);\n\n#endif\n\n#ifdef _NAD_OBU_\n\n//向vui发告警,封装了ou_alarm_report,返回自动分配的alarm_no\nvoid alarm_report_to_vui(int32_t alarm_level, string alarm, string mask = \"\", int64_t ttl = ALARM_TTL);\n\n#endif\n\n\n#endif\n"
},
{
"alpha_fraction": 0.5830903649330139,
"alphanum_fraction": 0.6122449040412903,
"avg_line_length": 17.052631378173828,
"blob_id": "21c9a10da368341dfdad2bcd7fa447eb7691d507",
"content_id": "ca42a0adaea5313034aa357f1ab55ff4b80963fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 357,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 19,
"path": "/athena/examples/LCM/Singlecar/launch_car/bin/dd.sh",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\n#杀掉某一个进程\nkillp()\n{\n PROCESS=`ps -ef|grep $1|grep -v grep|grep -v PPID|grep -v codeblocks|awk '{ print $2}'`\n for i in $PROCESS\n do\n echo \"Kill the $1 process [ $i ]\"\n kill -9 $i\n done\n}\n\nkillp \"obu_planning_60U5Z\"\nkillp \"sim_vui_DF001\"\nkillp \"control\"\nkillp \"sensor\"\nkillp \"planning_view\"\nkillp \"planning\"\n"
},
{
"alpha_fraction": 0.5582468509674072,
"alphanum_fraction": 0.5682429671287537,
"avg_line_length": 20.319671630859375,
"blob_id": "49666b908cc3a4eea18d1db6e0d19b5d265fbf24",
"content_id": "294d3007bf7a3650009bef0e538346c11cb2aa24",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2953,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 122,
"path": "/athena/examples/LCM/Singlecar/obu/src/obu/obu_planning/obu_planning.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:obu_planning.cpp\n * 创建者:李思政\n * 时 间:2016-04-03\n * 描 述:obu主进程,用于处理相关网元注册登录,交通信息,路径规划信息\n-------------------------------------------------------*/\n\n//头文件\n#include \"obu_planning.h\"\n#include \"pthread.h\"\n#include \"draw_obu_planning.h\"\n\n//进程公共全局变量\nobu_planning_starter *g_obu_planning = NULL;\n\n//构造函数\nobu_planning_starter::obu_planning_starter() : nad_starter()\n{\n obu.route = NULL;\n obu.route_draw[0] = NULL;\n obu.route_draw[1] = NULL;\n //obu.route->map_ = NULL;\n}\n\nobu_planning_starter::~obu_planning_starter()\n{\n if (obu.route != NULL)//yu\n {\n delete obu.route;\n }\n\n if (obu.route_draw != NULL)\n {\n delete[] obu.route_draw;\n }\n}\n\nvoid* ThreadFunction_map(void* param)\n{\n MyGLDispIni();\n glutMainLoop();\n return NULL;\n}\n\n//用于发消息\nNE_LCM *g_lcm = NULL;\n\n//自定义启动函数\nint obu_planning_starter::user_start()\n{\n //初始化lcm\n nad_config *tmp = get_config();\n if (tmp == NULL)\n {\n LOG(ERROR) << \"nad_starter::start(): get_config error\";\n return RET_ERROR;\n }\n g_lcm = new NE_LCM(get_config()->local.lcm_url);\n if (!g_lcm->good())\n {\n LOG(ERROR) << \"nad_starter::start(): g_lcm->good() error\";\n return RET_LCM_INIT_ERR;\n }\n\n //加载静态地图文件\n obu.route = new route::RouteBase(NAD_PATH + NAD_MAP_FILE);\n if (obu.route == NULL)\n {\n LOG(ERROR) << \"obu_planning_data::load_data(): obu.route (\" << NAD_MAP_FILE << \") return NULL\";\n }\n obu.route->transfer_.set_origin(NAD_PATH + NAD_MAP_SET_POINT);\n //pthread_create(&ptht_draw, NULL, &ThreadFunction_map, NULL);\n\n //启动定时器\n g_ltimer.add_timer(new obu_session_obu_timer(200));\n\n //设置obu name\n obu.name = get_config()->local.name;\n\n //注册消息处理句柄\n obu.reg_msg_to_lcm(g_lcm);\n //oct_manager.reg_msg_to_lcm(g_lcm);\n\n //返回成功\n return RET_OK;\n}\n\n//自定义退出函数\nvoid obu_planning_starter::user_stop()\n{\n delete g_obu_planning;\n delete g_lcm;\n}\n\n//自定义退出函数\nvoid obu_planning_starter::set_timer()\n{\n 
//异步lcm的时间间隔MIN_TIMER_MS=1毫秒\n struct timeval lcm_tv;\n\n //16-7-12 WFH update select会阻塞并修改tv参数的值,tv设置为0进行实验\n lcm_tv.tv_sec = 0;\n lcm_tv.tv_usec = MIN_TIMER_MS * 1000; //太小的话影响lcm收消息,太大的话无法实现密集触发的定时器\n if (g_lcm == NULL)\n {\n std::cout << \"g_lcm is NULL\" << std::endl;\n return;\n }\n g_lcm->async_handle(lcm_tv); //消息(网元内+网元间)\n}\n\n//主程序\nint main(int argc, char *argv[])\n{\n glutInit(&argc, argv);\n g_obu_planning = new obu_planning_starter();\n\n g_obu_planning->start(argc, argv);\n\n g_obu_planning->stop();\n return 0;\n}\n"
},
{
"alpha_fraction": 0.5521126985549927,
"alphanum_fraction": 0.5633803009986877,
"avg_line_length": 28.58333396911621,
"blob_id": "5888b3984fce3cc8d85364809cce5b13cd7849ad",
"content_id": "fe903d107f08000e0d2fabe596844a6186bfacd0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 355,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 12,
"path": "/athena/core/arm/Control/include/common/map_matching/steering_angle.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <math.h>\n#include \"../math_util.h\"\n#include \"point.h\"\n#include \"../path.h\"\n#include \"heading.h\"\n#include \"spline.h\"\n\ndouble cau_steering_angle_from_ks(tk::spline s_x, tk::spline s_y,\n double cs, double Ux, double& rr,\n double every1, double every2,int32_t k_trans);\n"
},
{
"alpha_fraction": 0.5436674356460571,
"alphanum_fraction": 0.5543022751808167,
"avg_line_length": 21.985185623168945,
"blob_id": "8e902f33089e0ada5660835a13e2827d26d347cc",
"content_id": "68d2031f5f0698000a01b7e81ccee759522a3286",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3365,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 135,
"path": "/athena/core/arm/Control/include/controller_output_alarm.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file controller_output_alarm.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROLLER_OUTPUT_ALARM_H_\n#define CONTROLLER_OUTPUT_ALARM_H_\n\n#include <vector>\n#include \"controller_config.h\"\n#include \"controller_alarm_code.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n\n/**\n * @class ControllerOutputAlarm\n *\n * @brief 控制警报输出.\n */\nclass ControllerOutputAlarm\n{\n public:\n/**\n * @brief 构造函数\n * @param[in] get_current_time_callback 获取当前时间回调函数.\n * @param[in] get_log_callback 日志记录回调函数.\n * @return true or false.\n */\n\n\n ControllerOutputAlarm(ControllerConfig *controller_config);\n ~ControllerOutputAlarm() = default;\n\n ///故障码格式\n typedef struct{\n ///故障码\n int32_t alarm_code;\n ///故障等级\n int32_t alarm_level;\n }AlarmInfo;\n ///报警表\n typedef struct{\n AlarmInfo alarm_info;\n ///发生时间\n Tm timer_stamp;\n }AlarmInfoTable;\n\n\n /**\n * @brief AddAlarm 添加报警码.\n * @param[in] code 警报码.\n * @return true or false.\n */\n bool AddAlarm(int32_t code);\n\n /**\n * @brief DelAlarm 删除报警码.\n * @param[in] code 警报码.\n * @return true or false.\n */\n bool DelAlarm(int32_t code);\n\n/**\n * @brief 清空故障表.\n * @param[in] void.\n * @return true or false.\n */\n bool ClearAlarmTable();\n\n/**\n * @brief FindAlarm 查找报警码.\n * @param[in] code 警报码.\n * @return -1 = 查找不到 其他 索引值.\n */\n int32_t FindAlarm(int32_t code);\n\n/**\n * @brief GetAlarmInfo 获取报警码信息.\n * @param[in] code 警报码.\n * @param[out] alarm_info 报警信息.\n * @return true or false.\n */\n bool GetAlarmInfo(int32_t code,AlarmInfoTable* alarm_info);\n\n /**\n * @brief 判断报警列表是否为空.\n * @return true or false.\n */\n bool IsEmpty();\n\n /**\n * @brief GetAlarmInfo 获取报警信息.\n * @param[out] alarm_list 报警信息.\n * @return void.\n */\n void GetAlarmTableInfo(std::vector<AlarmInfoTable> *alarm_list);\n\n private:\n ControllerConfig *controller_config_;\n ///报警个数上限\n const 
uint32_t limit_alarm_num_ = 9;\n\n //AlarmInfo[100] = {{OK,NORMAL},{CONTROLLER_OFF_TRACK_ERROR,ERROR}};\n ///报警信息列表\n AlarmInfo alarm_info_[9] = {\n {OK ,NORMAL},\n {CONTROLLER_OFF_TRACK_ERROR ,ERROR },\n {CONTROLLER_INIT_ERROR ,ERROR },\n {CONTROLLER_OFF_TRACK_ERROR ,ERROR },\n {CONTROLLER_TRAJECTORY_IS_SHORT_OR_LONG_ERROR,ERROR },\n {CONTROLLER_TRAJECTORY_ERROR ,HIGH_WARNING},\n {CONTROLLER_TRAJECTORY_SET_TIMEOUT ,HIGH_WARNING},\n {CONTROLLER_COMPUTE_TIMEOUT ,HIGH_WARNING},\n {CONTROLLER_COMPUTE_ERROR ,HIGH_WARNING},\n };\n\n std::vector<AlarmInfoTable> alarm_list_;\n std::vector<AlarmInfo> alarm_Info_list_;\n private:\n pthread_mutex_t lock_;\n};\n}\n}\n\n#endif // CONTROLLER_OUTPUT_H_\n"
},
{
"alpha_fraction": 0.47841888666152954,
"alphanum_fraction": 0.5779191255569458,
"avg_line_length": 27.03184700012207,
"blob_id": "82ed426151b6ef8765e15e4bd5a5b58d4e575de8",
"content_id": "8857cd649bbbf3ca23f18281f6cfd09e0df22efd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4402,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 157,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/acc/cs55/cs55_torque_speed_throttle_map.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n#include \"cs55_torque_speed_throttle_map.h\"\n\nstruct table {\n\tdouble x[10]; // Variable: x\n\t\t\t\t\t\t\t\t\t\t // Referenced by: '<Root>/2-D Lookup Table'\n\n\tdouble y[9]; // Variable: y\n\t\t\t\t\t\t\t\t\t\t // Referenced by: '<Root>/2-D Lookup Table'\n\n\tdouble z[90]; // Variable: z\n\t\t\t\t\t\t\t\t\t\t // Referenced by: '<Root>/2-D Lookup Table'\n\n\tunsigned int DLookupTable_maxIndex[2]; // Computed Parameter: DLookupTable_maxIndex\n};\n\n\nstatic const table table_reference = {\n\t\t// Variable: x\n\t\t// Referenced by: '<Root>/2-D Lookup Table'\n\n\t\t{ 1.92, 3.48, 5.37, 7.1, 10.15, 13.15, 15.84, 18.35, 22.5, 23.93 },\n\n\t\t// Variable: y\n\t\t// Referenced by: '<Root>/2-D Lookup Table'\n\n\t\t{ 8.0, 9.0, 10.0, 12.0, 16.0, 17.0, 19.0, 23.0, 34.0 },\n\n\t\t// Variable: z\n\t\t// Referenced by: '<Root>/2-D Lookup Table'\n\n\t\t{ 1.5, 7.4, 8.0, 9.5, 10.2, 11.0, 11.5, 12.5, 13.9, 14.9, 1.5, 7.5, 8.5,\n\t\t10.0, 10.7, 11.5, 12.0, 13.0, 14.4, 15.5, 7.0, 7.8, 9.0, 10.5, 11.5, 12.0,\n\t\t12.5, 13.6, 15.0, 15.9, 7.3, 8.2, 9.3, 11.0, 12.0, 12.5, 12.8, 14.3, 15.5,\n\t\t16.5, 8.0, 9.0, 10.3, 11.5, 12.5, 13.1, 13.5, 14.8, 16.1, 17.3, 8.7, 9.8,\n\t\t10.5, 12.0, 13.0, 13.8, 14.2, 15.3, 16.5, 17.7, 9.5, 10.3, 10.9, 12.5,\n\t\t13.5, 14.5, 14.8, 15.8, 17.1, 19.5, 10.0, 10.7, 11.4, 14.0, 14.5, 15.5,\n\t\t16.5, 17.5, 18.5, 20.5, 15.5, 16.5, 17.5, 18.5, 19.5, 20.5, 21.5, 22.5,\n\t\t23.5, 24.5 },\n\n\t\t// Computed Parameter: DLookupTable_maxIndex\n\t\t// Referenced by: '<Root>/2-D Lookup Table'\n\n\t\t{ 9U, 8U }\n\t}; // Modifiable parameters\n\n\n\ndouble look2_binlxpw(double u0, double u1, const double bp0[], const double bp1[],\n\tconst double table[], const unsigned int maxIndex[], unsigned int\n\tstride)\n{\n\tdouble frac;\n\tunsigned int bpIndices[2];\n\tdouble fractions[2];\n\tdouble yL_1d;\n\tunsigned int iRght;\n\tunsigned int bpIdx;\n\tunsigned int iLeft;\n\n\t// Lookup 2-D\n\t// Search method: 'binary'\n\t// Use previous index: 'off'\n\t// 
Interpolation method: 'Linear'\n\t// Extrapolation method: 'Linear'\n\t// Use last breakpoint for index at or above upper limit: 'off'\n\t// Remove protection against out-of-range input in generated code: 'off'\n\n\t// Prelookup - Index and Fraction\n\t// Index Search method: 'binary'\n\t// Extrapolation method: 'Linear'\n\t// Use previous index: 'off'\n\t// Use last breakpoint for index at or above upper limit: 'off'\n\t// Remove protection against out-of-range input in generated code: 'off'\n\n\tif (u0 <= bp0[0U]) {\n\t\tiLeft = 0U;\n\t\tfrac = (u0 - bp0[0U]) / (bp0[1U] - bp0[0U]);\n\t}\n\telse if (u0 < bp0[maxIndex[0U]]) {\n\t\t// Binary Search\n\t\tbpIdx = maxIndex[0U] >> 1U;\n\t\tiLeft = 0U;\n\t\tiRght = maxIndex[0U];\n\t\twhile (iRght - iLeft > 1U) {\n\t\t\tif (u0 < bp0[bpIdx]) {\n\t\t\t\tiRght = bpIdx;\n\t\t\t}\n\t\t\telse {\n\t\t\t\tiLeft = bpIdx;\n\t\t\t}\n\n\t\t\tbpIdx = (iRght + iLeft) >> 1U;\n\t\t}\n\n\t\tfrac = (u0 - bp0[iLeft]) / (bp0[iLeft + 1U] - bp0[iLeft]);\n\t}\n\telse {\n\t\tiLeft = maxIndex[0U] - 1U;\n\t\tfrac = (u0 - bp0[maxIndex[0U] - 1U]) / (bp0[maxIndex[0U]] - bp0[maxIndex[0U]\n\t\t\t- 1U]);\n\t}\n\n\tfractions[0U] = frac;\n\tbpIndices[0U] = iLeft;\n\n\t// Prelookup - Index and Fraction\n\t// Index Search method: 'binary'\n\t// Extrapolation method: 'Linear'\n\t// Use previous index: 'off'\n\t// Use last breakpoint for index at or above upper limit: 'off'\n\t// Remove protection against out-of-range input in generated code: 'off'\n\n\tif (u1 <= bp1[0U]) {\n\t\tiLeft = 0U;\n\t\tfrac = (u1 - bp1[0U]) / (bp1[1U] - bp1[0U]);\n\t}\n\telse if (u1 < bp1[maxIndex[1U]]) {\n\t\t// Binary Search\n\t\tbpIdx = maxIndex[1U] >> 1U;\n\t\tiLeft = 0U;\n\t\tiRght = maxIndex[1U];\n\t\twhile (iRght - iLeft > 1U) {\n\t\t\tif (u1 < bp1[bpIdx]) {\n\t\t\t\tiRght = bpIdx;\n\t\t\t}\n\t\t\telse {\n\t\t\t\tiLeft = bpIdx;\n\t\t\t}\n\n\t\t\tbpIdx = (iRght + iLeft) >> 1U;\n\t\t}\n\n\t\tfrac = (u1 - bp1[iLeft]) / (bp1[iLeft + 1U] - bp1[iLeft]);\n\t}\n\telse {\n\t\tiLeft = 
maxIndex[1U] - 1U;\n\t\tfrac = (u1 - bp1[maxIndex[1U] - 1U]) / (bp1[maxIndex[1U]] - bp1[maxIndex[1U]\n\t\t\t- 1U]);\n\t}\n\n\n\tbpIdx = iLeft * stride + bpIndices[0U];\n\tyL_1d = (table[bpIdx + 1U] - table[bpIdx]) * fractions[0U] + table[bpIdx];\n\tbpIdx += stride;\n\treturn (((table[bpIdx + 1U] - table[bpIdx]) * fractions[0U] + table[bpIdx]) -\n\t\tyL_1d) * frac + yL_1d;\n}\n\n\ndouble CS55GetAccValue(double speed, double torque)\n{\n\treturn look2_binlxpw(speed,\n\t\ttorque, *(double(*)[10])&\n\t\ttable_reference.x[0], *(double(*)[9])&table_reference.y[0],\n\t\t*(double(*)[90])&table_reference.z[0], *(unsigned int(*)[2])&\n\t\ttable_reference.DLookupTable_maxIndex[0], 10U);\n}\n"
},
{
"alpha_fraction": 0.46287301182746887,
"alphanum_fraction": 0.47397640347480774,
"avg_line_length": 30.282608032226562,
"blob_id": "d7d804ccf38b9e203bf323d5e0414b623a34d147",
"content_id": "265aff3b80e5e21f4722816b93cd87db95b5f511",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1481,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 46,
"path": "/athena/core/arm/Planning/include/vehicle_dynamic/heading.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <math.h>\n#include <vector>\n#include \"spline/spline.h\"\n\ndouble calculate_R(double steering,\n double Ux);\n\nvoid derta_heading(double h1,\n double h2,\n double& derta_h);\n\ndouble cau_delta_heading(\n double current_heading,\n double head_c);\n\n// 求取相关点的头指向。\nint get_heading_h(double x1, double y1,\n double x2, double y2, double& h2,\n double& h_r, int num);\n\n// 求取相关点的头指向。\nint get_heading(double x1, double y1,\n double x2, double y2, double& h2);\nint get_heading(double dx, double dy, double& h);\n\nint get_steering_angle_h(double h1, double h2,\n double s, double& st, double& st_r);\n\nint get_steering_angle(double h1, double h2,\n double s, double& st);\n\ndouble cau_heading_angle_from_ks(tk::spline s_x,\n tk::spline s_y,\n double cs,\n double every);\ndouble cau_heading_angle_from_ks(tk::spline s_x,\n tk::spline s_y,\n double cs);\n\ndouble cau_heading_angle_from_circle_ks(tk::spline s_x,\n tk::spline s_y,\n double cs,\n double every,\n double zero_offset);\n\n\n"
},
{
"alpha_fraction": 0.6519452929496765,
"alphanum_fraction": 0.6708727478981018,
"avg_line_length": 13.646153450012207,
"blob_id": "493467bba741bfc1c5126e94d4c5dc56cd898ed2",
"content_id": "182298a55615301c4d4f8753e08b24b0fd7a80a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1205,
"license_type": "no_license",
"max_line_length": 174,
"num_lines": 65,
"path": "/athena/examples/ROS/src/Perception/lane_detect/README.md",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "## lane_detect\n\n### Introduction\n\n这是一个用来检测车道线的ROS包,通过订阅图像消息进行车道线检测,发布车道线检测渲染的结果以及离散的车道线数据点。\n\n订阅的消息如下:\n\n- /usb_cam/image_raw **[sensor_msgs/Image]**\n\n发布的消息如下:\n\n- /usb_cam/image_lane_detected **[sensor_msgs/Image]**\n- /laneDetectQuality **[lane_detect/LaneDeectResult]**\n\n发布的服务如下:\n\n- laneDetect **[lane_detect/LaneDetector]**\n\n---\n\n\n\n### Build\n\n#### Prerequisition\n\n该软件包依赖**[ROS Kinetic](http://wiki.ros.org/kinetic/Installation/Ubuntu)**,**OpenCV 3.0**及以上 以及**[LCM](https://github.com/lcm-proj/lcm/releases/download/v1.4.0/lcm-1.4.0.zip)**\n\n- ubuntu 16.04\n- ROS kinetic\n- OpenCV 3.0 \n- lcm\n\n#### compile\n\n```shell\n$cd your_catkin_ws\n$catkin_make\n```\n\n\n\n---\n\n### Usage\n\n- 离线rosbag测试\n\n 1. 下载[rosbag](www.baidu.com)\n 2. 输入下面的命令\n\n ```shell\n $rosbag play XX.bag\n $roslaunch lane_detect lane_detect.launch\n ```\n\n- 在线测试\n\n\n---\n\n### Acknologement\n\n如果想了解更多信息,请访问[环宇智行](http://www.in-driving.com/)。"
},
{
"alpha_fraction": 0.5694444179534912,
"alphanum_fraction": 0.5856481194496155,
"avg_line_length": 24.714284896850586,
"blob_id": "e11cb84d6bc245eaa5764c136da7be5997fda4c9",
"content_id": "1d0e9c0627ac29ecfabd3b55d067f8b8221bc919",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2674,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 84,
"path": "/athena/core/arm/Common/include/oam/task/nad_task_userfunc.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_task_userfunc.h\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:二期脚本中的用户自定义函数\n-------------------------------------------------------*/\n#ifndef _NAD_TASK_USERFUNC_H\n#define _NAD_TASK_USERFUNC_H\n\n\n//头文件\n#include \"nad_task_func.h\"\n#include \"info/nad_info.h\"\n\n//动作类型\n#define ACTION_CHANGE_LANE 1 //换道\n#define ACTION_ALARM 2 //触发告警\n\n//用户自定义动作\nclass nad_user_action\n{\npublic:\n double mileage; //动作点,在nad_route.center_line中的mileage里程,注意从center_line[0]计里程,而不是KP_ROUTE_START点\n int action; //动作类型,参考枚举值:ACTION_XXX\n int direction; //换道方向,参考枚举值:CL_DIRECTION_XXX\n int reason; //换道原因,参考枚举值:CL_REASON_XXX\n string alarm; //告警信息\n int32_t alarm_level; //告警级别,参考枚举值:ALARM_XXX\n bool execed; //是不是已经执行过\n\n //构造函数\n nad_user_action()\n {\n mileage = 100000000.0;\n action = ACTION_CHANGE_LANE;\n direction = CL_DIRECTION_LEFT;\n reason = CL_REASON_HAND;\n alarm_level = ALARM_ADVISE;\n execed = false;\n }\n};\n\n//用户自定义路径规划\nclass nad_user_route\n{\npublic:\n string name; //路径规划名称\n size_t begin; //起点下标(起点前可能补点,以便motion规划)\n vector<key_point> key_point_list; //关键点列表\n vector<nad_user_action> action_list; //动作点列表\n\npublic:\n //构造函数\n nad_user_route()\n {\n begin = 0;\n }\n\n //匹配经纬度\n bool find_route(double starting_lon, double starting_lat, double ending_lon, double ending_lat);\n\n //调试打印\n void show(const char *cap);\n};\n\n//用户自定义路径规划\nclass nad_user_route_list\n{\npublic:\n map<string, nad_user_route> route_list; //用户自定义路径列表\n map<string, vector<nad_user_route *> > obu_list; //每个obu可能使用哪些自定义路径\n\npublic:\n //用key_point_list首尾点匹配脚本路径,匹配到后替换key_point_list\n nad_user_route *find_route(string obu_name, vector<key_point> &key_point_list, size_t &begin, int32_t type = 0);\n\n //从脚本函数add_route中加载路径规划\n int task_add_route(nad_task_func &func, string &ret, string &desc);\n\n //从脚本函数set_obu中加载obu的路径列表\n int task_set_route(nad_task_func &func, string &ret, string &desc);\n};\n\n#endif\n"
},
{
"alpha_fraction": 0.5791006088256836,
"alphanum_fraction": 0.5975412726402283,
"avg_line_length": 20.76760482788086,
"blob_id": "359eb13f21e513fa7bb9bfb2fd665f1486e20a7c",
"content_id": "96abbb8cb1857c101f9898af47c13f16e4323ee3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3817,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 142,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/control_logic_config.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file control_logic_config.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROL_LOGIC_CONTROL_LOGIC_CONFIG_H_\n#define CONTROL_LOGIC_CONTROL_LOGIC_CONFIG_H_\n\n#include <functional>\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class ControlLogicConfig\n *\n * @brief ControlLogicConfig.\n */\nclass ControlLogicConfig{\npublic:\n ControlLogicConfig() = default;\n ~ControlLogicConfig() = default;\n ///调试开关\n int32_t gui_enable_;\n ///车辆类型\n int32_t vehicle_type_;\n ///车辆类型名\n std::string vehicle_type_name_;\n ///工作模式调试 可以单独进行横向或者纵向控制\n int32_t debug_driving_mode_;\n ///转向工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t debug_steering_driving_mode_;\n ///纵向控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t debug_acc_driving_mode_;\n ///刹车控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t debug_brake_driving_mode_;\n ///EPB控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t debug_epb_driving_mode_;\n ///前轮侧偏刚度\n double cf_;\n ///后轮侧偏刚度\n double cr_;\n ///车辆重量\n double vehicle_weight_;\n ///车长\n double vehicle_length_;\n ///前轮轴距\n double lf_;\n ///后轮轴距\n double lr_;\n ///车辆高度\n double h_;\n ///前轮轮距\n double vehicle_width_;\n ///轮距\n double wheelbase_ = 1.720;\n ///车轮半径\n double wheel_radius_;\n ///转向比\n double steer_tranmission_ratio_;\n ///最大刹车值\n double max_brake_value_;\n ///怠速状态最大减速度\n double max_deceleration_in_idle_;\n ///车辆最小速度\n double min_speed_;\n ///车辆限速\n double vechile_speed_max_;\n ///横向控制动态kp\n double moving_kp_;\n ///横向控制PID调节P值\n double lat_kp_;\n ///横向控制PID调节I值\n double lat_ki_;\n ///横向控制PID调节D值\n double lat_kd_;\n ///纵向控制PID调节P值\n double lon_kp_;\n ///纵向控制PID调节I值\n double lon_ki_;\n ///纵向控制PID调节D值\n double lon_kd_;\n ///地图坐标原点纬度\n double origin_lat_;\n ///地图坐标原点经度\n double origin_lon_;\n ///位置误差门限值\n double max_position_error_;\n ///最大转向角\n double max_steering_angle_;\n ///最小转向角\n double min_steering_angle_;\n\n //动态kp值变化 suggest_kp = 
steer_angle/kp_slope_ + kp_value_\n double kp_slope_;\n double kp_value_;\n ///预描距离\n double xla_;\n ///位置误差比重\n double k_e_err_;\n ///角度误差比重\n double k_fi_err_;\n ///总误差比重\n double k_ela_;\n\n ///控制器选择\n int32_t controller_switch_;\n ///LQR Q加权矩阵\n std::vector<double> lqr_matrix_q_;\n ///LQR离散时长\n double lqr_ts_=0.01;\n ///LQR预测窗口大小\n double lqr_preview_window_=0;\n ///LQR计算阀值\n double lqr_eps_=0.01;\n ///LQR滤波器窗口大小\n double lqr_mean_filter_window_size_=10;\n ///LQR最大迭代次数\n double lqr_max_iteration_=150;\n ///LQR横向最大加速度\n double lqr_max_lateral_acceleration_=5.0;\n ///最小速度保护\n double lqr_minimum_speed_protection_=0.1;\n ///\n int32_t lqr_cutoff_freq_;\n ///横向误差调节器 避免误差过大的时候有较大调节\n std::vector<Scheduler> lqr_lat_err_scheduler_init_;\n ///航向角误差调节器 避免误差过大的时候有较大调节\n std::vector<Scheduler> lqr_heading_err_scheduler_init_;\n};\n}\n}\n\n#endif // CONTROL_LOGIC_CONTROL_LOGIC_CONFIG_H_\n"
},
{
"alpha_fraction": 0.6100107431411743,
"alphanum_fraction": 0.6195659041404724,
"avg_line_length": 39.520511627197266,
"blob_id": "a789d1218158b6c1cda2e9c6e14a5443d42c8d00",
"content_id": "5d71ac60f721de98c43b1d36b4acc9b2d8546361",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 16651,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 390,
"path": "/athena/examples/LCM/Singlecar/control/apps/control.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"control.h\"\n#include \"control_view/control_view.h\"\n\n\n\nnamespace athena{\nnamespace control{\n\nControl::Control(std::string software_version)\n{\n software_version_ = software_version;\n}\n\nvoid Control::Init()\n{\n ///读配置文件设置\n std::cout<<\"config_file_path_:\"<<config_file_path_<<endl;\n main_config_file_ = new Config(config_file_path_);\n\n //调试\n gui_enable_ = main_config_file_ -> Read(\"GUI_ENABLE\",1);\n debug_enable_ = main_config_file_ -> Read(\"DEBUG_ENABLE\",1);\n Logging::Init(debug_enable_);\n print_enable_ = main_config_file_ -> Read(\"PRINT_ENABLE\",1);\n log_enable_ = main_config_file_ -> Read(\"LOG_ENABLE\",1);\n debug_print_period_ = main_config_file_ -> Read(\"DEBUG_PRINT_PERIOD\",0);\n debug_log_period_ = main_config_file_ -> Read(\"DEBUG_LOG_PERIOD\",0);\n init_map_from_file_ = main_config_file_ -> Read(\"INIT_MAP_FROM_FILE\",0);\n\n vehicle_type_ = main_config_file_ -> Read(\"VEHICLE_TYPE\",1);\n\n switch(vehicle_type_)\n {\n case CS55:\n break;\n case TRUCK_J6P:\n break;\n default:\n Logging::LogInfo(Logging::ERROR,\"unknown vehicle type\");\n break;\n }\n\n vehicle_type_name_ = main_config_file_ -> Read(\"VEHICLE_TYPE_NAME\",vehicle_type_name_);\n vehicle_config_file_path_ = \"config/\"+vehicle_type_name_ + \"/\" +vehicle_type_name_+\"_\" + \"config.cfg\";\n std::cout<<\"vehicle_config_file_path_:\"<<vehicle_config_file_path_<<endl;\n\n vehicle_config_file_ = new Config(vehicle_config_file_path_);\n //读配置文件\n ReadConfigFile();\n //控制逻辑初始化\n control_logic_.Init(control_logic_config_);\n control_logic_.start();\n\n if(gui_enable_ == 1)\n {\n ControlView::Init(&control_logic_, vehicle_width_);\n ControlView::MyGLDispIni();\n }\n\n if(init_map_from_file_ == 1)\n {\n local_trajectory_path = \"looptest.txt\";\n local_trajectory_path = main_config_file_ -> Read(\"LOCAL_TRAJECTORY_PATH\",local_trajectory_path);\n track_trajectory_ = new TrackTrajectory();\n track_trajectory_ -> 
Init(local_trajectory_path,&control_logic_);\n }\n\n control_debug_ = new ControlDebug<Control>();\n\n if(print_enable_ == 1)\n {\n control_debug_ -> AddPrint(debug_print_period_,&Control::print,this);\n }\n\n if(log_enable_ == 1)\n {\n control_debug_ -> AddLog(debug_log_period_,&Control::log,this);\n }\n}\n\nvoid Control::print()\n{\n printf(\"\\033[2J\");\n //std::cout.imbue(locale(\"chs\"));\n\n std::cout<<\"ver:\" <<software_version_<<\" 软件版本\"<<endl;\n std::cout<<\"--------------location info---------\"<<endl;\n std::cout<<\"controller_switch:\" <<controller_switch_<<\" 控制器\"<<endl;\n std::cout<<\"vehicle_type:\" <<vehicle_type_<<\" 车辆类型\"<<endl;\n std::cout << \"lat:\" << control_logic_debug_output_.localization_output_.lat_ << \" lon:\" << control_logic_debug_output_.localization_output_.lon_ << \" 经纬度\" << endl;\n std::cout << \"heading:\" << control_logic_debug_output_.localization_output_.heading_ << \" 航向角\" << endl;\n std::cout << \"ins_speed:\" << control_logic_debug_output_.localization_output_.longitudinal_speed_ << \" 速度\" << endl;\n std::cout << \"heading_speed:\" << control_logic_debug_output_.localization_output_.heading_speed_ << \" 航向角速度\" << endl;\n\n std::cout<<\"--------------vehicle info---------\"<<endl;\n control_logic_.GetControlLogicDebugOutput(control_logic_debug_output_);\n std::cout << \"steer_angle:\" << control_logic_debug_output_.chassis_detail_output_.steering_angle_feedback_ << \"deg 转向角\" << endl;\n std::cout << \"vehicle_speed:\" << control_logic_debug_output_.chassis_detail_output_.car_speed_*3.6 << \"km/h 车速\" << endl;\n std::cout << \"gear:\" << control_logic_debug_output_.chassis_detail_output_.at_gear_ << \"档位\" << endl;\n std::cout <<\"tar_speed:\" << control_logic_debug_output_.lon_controller_tar_speed_<<\" 目标速度\"<<endl;\n\n std::cout<<\"--------------vehicle cmd---------\"<<endl;\n std::cout << \"steering_angle_:\"<<control_cmd_.steering_angle_ << \" 转向角\"<<endl\n << 
\"steering_angle_speed_:\"<<control_cmd_.steering_angle_speed_ << \" 转向角速度\"<<endl\n << \"steering_driving_mode:\"<<control_cmd_.steering_driving_mode_ << \" 转向工作模式\"<<endl\n\n << \"accelerate_value:\"<<control_cmd_.acc_value_ << \" 油门值\"<<endl\n << \"accelerate_driving_mode_:\"<<control_cmd_.accelerate_driving_mode_ << \" 油门工作模式\"<<endl\n\n << \"brake_value_:\"<<control_cmd_.brake_value_ << \" 刹车值\"<<endl\n << \"brake_driving_mode_:\"<<control_cmd_.brake_driving_mode_ << \" 刹车工作模式\"<<endl\n << \"epb_enable_:\"<<control_cmd_.epb_enable_ << \" EPB使能状态\"<<endl\n << \"epb_driving_mode_:\"<<control_cmd_.epb_driving_mode_ << \" EPB工作状态\"<<endl\n << \"gear_lever_:\"<<control_cmd_.gear_lever_ << \" 档杆位置\"<<endl;\n}\n\n\nvoid Control::log()\n{\n// string log_filename = \"./log/control\" + start_time + \".log\";\n// ofstream logfile(log_filename.c_str(),std::ofstream::app); // 45 speed=7\n// logfile.precision(13);\n// logfile << dec\n// <<software_version<<\" \"\n// <<control_cmd_.steering_angle_ <<\" \"\n// <<control_cmd_.steering_angle_speed_<<\" \"\n// <<control_cmd_.accelerate_driving_mode_<<\" \"\n// <<control_cmd_.brake_value_<<\" \"\n// <<control_cmd_.brake_driving_mode_ <<\" \"\n// <<control_cmd_.epb_enable_<<\" \"\n// <<control_cmd_.epb_driving_mode_<<\" \"\n// <<control_cmd_.gear_lever_ <<\" \"\n// <<control_logic_debug_output_.lon_controller_tar_speed_<<\" \"\n// << endl;\n// logfile.close();\n}\n\n\n\nvoid Control::run()\n{\n while(1)\n {\n PublishControlOutputOnTimer();\n //std::cout<<\"control run\"<<endl;\n //10ms\n usleep(20000);\n }\n}\n\nvoid Control::ResigerMessageManger(MessageManger *message_manger)\n{\n message_manger_ = message_manger;\n}\n\nvoid Control::PublishControlOutputOnTimer()\n{\n //紧急事件\n Emergency energnecy;\n if(control_logic_.GetControllerAlarmInfo(&energnecy) == true)\n {\n message_manger_ -> PublishEmergencyCmd(energnecy);\n }\n control_logic_.GetControlCmd(control_cmd_);\n PublishControlCmd(control_cmd_);\n 
control_logic_.GetControlInfoReport(control_info_report_);\n PublishControlInfoReport(control_info_report_);\n control_logic_.GetBcmControlCmd(bcm_control_cmd_);\n PublishBcmControlCmd(bcm_control_cmd_);\n}\n\nvoid Control::PublishBcmControlCmd(BcmControlCmd bcm_control_cmd)\n{\n message_manger_ -> PublishBcmControlCmd(bcm_control_cmd);\n}\n\nvoid Control::PublishControlCmd(ControlCmd control_cmd)\n{\n message_manger_ -> PublishControlCmd(control_cmd);\n}\n\nvoid Control::PublishControlInfoReport(ControlInfoReport control_info_report)\n{\n message_manger_ -> PublishControlInfoReport(control_info_report);\n}\n\n\nvoid Control::ReadConfigFile()\n{\n message_type_ = main_config_file_ -> Read(\"MESSAGE_TYPE\",message_type_);\n\n switch(message_type_)\n {\n case LCM:\n {\n string obu_url = \"udpm://239.255.76.64:7604?ttl=3\";\n std::cout<<\"message type:LCM\"<<endl;\n ResigerMessageManger(&lcm_message_manger_);\n\n message_manger_ -> Init(main_config_file_ -> Read(\"OBU_URL\",obu_url),&control_logic_);\n }\n break;\n default:\n {\n Logging::LogInfo(Logging::ERROR,\"unknown message type\");\n }\n break;\n }\n\n ///调试开关\n control_logic_config_.gui_enable_ = gui_enable_;\n\n ///车辆类型\n control_logic_config_.vehicle_type_ = vehicle_type_;\n ///车辆类型名\n control_logic_config_.vehicle_type_name_ = vehicle_type_name_;\n\n debug_driving_mode_ = vehicle_config_file_ -> Read(\"DEBUG_DRIVING_MODE\",0);\n control_logic_config_.debug_driving_mode_ = debug_driving_mode_;\n ///转向工作模式 1 = 自动驾驶 0 = 非自动驾驶\n debug_steering_driving_mode_ = vehicle_config_file_ -> Read(\"DEBUG_LAT_DRIVING_MODE\",0);\n control_logic_config_.debug_steering_driving_mode_ = debug_steering_driving_mode_;\n\n ///纵向控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n debug_brake_driving_mode_ = debug_acc_driving_mode_ = vehicle_config_file_ -> Read(\"DEBUG_LON_DRIVING_MODE\",0);\n control_logic_config_.debug_acc_driving_mode_ = debug_acc_driving_mode_;\n control_logic_config_.debug_brake_driving_mode_ = debug_brake_driving_mode_;\n\n 
///EPB控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n debug_epb_driving_mode_ = vehicle_config_file_ -> Read(\"DEBUG_EPB_DRIVING_MODE\",0);\n control_logic_config_.debug_epb_driving_mode_ = debug_epb_driving_mode_;\n\n ///前轮侧偏刚度\n vehicle_cf_ = vehicle_config_file_ -> Read(\"VEHICLE_CF\",0.0);\n control_logic_config_.cf_ = vehicle_cf_;\n\n ///后轮侧偏刚度\n vehicle_cr_ = vehicle_config_file_ -> Read(\"VEHICLE_CR\",0.0);\n control_logic_config_.cr_ = vehicle_cr_;\n\n ///车辆重量\n vehicle_weight_ = vehicle_config_file_ -> Read(\"VEHICLE_WEIGHT\",0.0);\n control_logic_config_.vehicle_weight_ = vehicle_weight_;\n\n ///车辆高度\n vehicle_height_ = vehicle_config_file_ -> Read(\"VEHICLE_HEIGHT\",0.0);\n control_logic_config_.h_ = vehicle_height_;\n ///车长\n vehicle_length_ = vehicle_config_file_ -> Read(\"VEHICLE_LENGTH\",0.0);\n control_logic_config_.vehicle_length_ = vehicle_length_;\n\n ///车辆宽度\n vehicle_width_ = vehicle_config_file_ -> Read(\"VEHICLE_WIDTH\",0.0);\n control_logic_config_.vehicle_width_ = vehicle_width_;\n\n ///轴距\n wheelbase_ = vehicle_config_file_ -> Read(\"WHEELBASE\",0.0);\n control_logic_config_.wheelbase_ = wheelbase_;\n ///前轮轴距\n vehicle_l_front_ = vehicle_config_file_ -> Read(\"VEHICLE_L_FRONT\",0.0);\n control_logic_config_.lf_ = vehicle_l_front_;\n ///后轮轴距\n vehicle_l_after_ = vehicle_config_file_ -> Read(\"VEHICLE_L_AFTER\",0.0);\n control_logic_config_.lr_ = vehicle_l_after_;\n ///车轮半径\n vehicle_wheel_radius_ = vehicle_config_file_ -> Read(\"VEHICLE_WHEEL_RADIUS\",0.0);\n control_logic_config_.wheel_radius_ = vehicle_wheel_radius_;\n ///转向比\n k_trans_ = vehicle_config_file_ -> Read(\"K_TRANS\",k_trans_);\n control_logic_config_.steer_tranmission_ratio_ = k_trans_;\n ///最大刹车值\n max_brake_value_ = vehicle_config_file_ -> Read(\"MAX_BRAKE_VALUE\",0.0);\n control_logic_config_.max_brake_value_ = max_brake_value_;\n ///怠速状态最大减速度\n max_deceleration_in_idle_ = vehicle_config_file_ -> Read(\"MAX_DECELERATION_IN_IDLE\",0.0);\n control_logic_config_.max_deceleration_in_idle_ = 
max_deceleration_in_idle_;\n ///车辆限速\n vechile_speed_max_ = vehicle_config_file_ -> Read(\"VECHILE_SPEED_MAX\",0.0);\n control_logic_config_.vechile_speed_max_ = vechile_speed_max_;\n ///横向控制动态kp\n moving_kp_ = vehicle_config_file_ -> Read(\"MOVING_KP\",0.0);\n control_logic_config_.moving_kp_ = moving_kp_;\n ///横向控制PID调节P值\n lat_kp_value_ = vehicle_config_file_ -> Read(\"LAT_KP_VALUE\",0.0);\n control_logic_config_.lat_kp_ = lat_kp_value_;\n ///横向控制PID调节I值\n lat_ki_value_ = vehicle_config_file_ -> Read(\"LAT_KI_VALUE\",0.0);\n control_logic_config_.lat_ki_ = lat_ki_value_;\n ///横向控制PID调节D值\n lat_kd_value_ = vehicle_config_file_ -> Read(\"LAT_KD_VALUE\",0.0);\n control_logic_config_.lat_kd_ = lat_kd_value_;\n ///纵向控制PID调节P值\n lon_kp_value_ = vehicle_config_file_ -> Read(\"LON_KP_VALUE\",0.0);\n control_logic_config_.lon_kp_ = lon_kp_value_;\n ///纵向控制PID调节I值\n lon_ki_value_ = vehicle_config_file_ -> Read(\"LON_KI_VALUE\",0.0);\n control_logic_config_.lon_ki_ = lon_ki_value_;\n ///纵向控制PID调节D值\n lon_kd_value_ = vehicle_config_file_ -> Read(\"LON_KD_VALUE\",0.0);\n control_logic_config_.lon_kd_ = lon_kd_value_;\n ///地图坐标原点纬度\n origin_lat_ = vehicle_config_file_ -> Read(\"ORIGIN_LAT\",0.0);\n control_logic_config_.origin_lat_ = origin_lat_;\n ///地图坐标原点经度\n origin_lon_ = vehicle_config_file_ -> Read(\"ORIGIN_LON\",0.0);\n control_logic_config_.origin_lon_ = origin_lon_;\n ///位置误差门限值\n max_position_error_ = vehicle_config_file_ -> Read(\"MAX_POSITION_ERROR\",0.0);\n control_logic_config_.max_position_error_ = max_position_error_;\n ///最大转向角\n max_steering_angle_ = vehicle_config_file_ -> Read(\"MAX_STEERING_ANGLE\",0.0);\n\n control_logic_config_.max_steering_angle_ = max_steering_angle_;\n ///最小转向角\n min_steering_angle_ = vehicle_config_file_ -> Read(\"MIN_STEERING_ANGLE\",0.0);\n control_logic_config_.min_steering_angle_ = min_steering_angle_;\n\n //动态kp值变化 suggest_kp = steer_angle/kp_slope_ + kp_value_\n kp_slope_ = vehicle_config_file_ -> Read(\"KP_SLOPE\",0.0);\n 
control_logic_config_.kp_slope_ = kp_slope_;\n kp_value_ = vehicle_config_file_ -> Read(\"KP_VALUE\",0.0);\n control_logic_config_.kp_value_ = kp_value_;\n ///预描距离\n xla_ = vehicle_config_file_ -> Read(\"XLA\",20.0);\n control_logic_config_.xla_ = xla_;\n ///位置误差比重\n k_e_err_ = vehicle_config_file_ -> Read(\"K_E_ERR\",1.0);\n control_logic_config_.k_e_err_ = k_e_err_;\n ///角度误差比重\n k_fi_err_ = vehicle_config_file_ -> Read(\"K_FI_ERR\",1.0);\n control_logic_config_.k_fi_err_ = k_fi_err_;\n ///总误差比重\n k_ela_ = vehicle_config_file_ -> Read(\"K_ELA\",1.0);\n control_logic_config_.k_ela_ = k_ela_;\n\n ///LQR Q加权矩阵\n int32_t lqr_matrix_q_size = vehicle_config_file_ -> Read(\"lqr_matrix_q_size\",5);\n for(int i = 0;i < lqr_matrix_q_size;i++)\n {\n std::string str;\n stringstream ss;\n ss<<i;\n std::string s1 = ss.str();\n str = \"lqr_matrix_q_\" + s1;\n lqr_matrix_q_.push_back(vehicle_config_file_ -> Read(str,0));\n //std::cout<<\"str:\"<<str<<endl;\n control_logic_config_.lqr_matrix_q_.push_back(vehicle_config_file_ -> Read(str,0.0));\n //std::cout<<\"q:\"<<lqr_matrix_q_[i]<<endl;\n }\n ///控制器选择\n controller_switch_ = vehicle_config_file_ -> Read(\"controller_switch\",1);\n control_logic_config_.controller_switch_ = controller_switch_;\n\n ///LQR离散时长\n lqr_ts_ = vehicle_config_file_ -> Read(\"lqr_ts\",0.01);\n control_logic_config_.lqr_ts_ = lqr_ts_;\n ///LQR预测窗口大小\n lqr_preview_window_ = vehicle_config_file_ -> Read(\"lqr_preview_window\",0);\n control_logic_config_.lqr_preview_window_ = lqr_preview_window_;\n ///LQR计算阀值\n lqr_eps_ = vehicle_config_file_ -> Read(\"lqr_eps\",0.01);\n control_logic_config_.lqr_eps_ = lqr_eps_;\n ///LQR滤波器窗口大小\n lqr_mean_filter_window_size_ = vehicle_config_file_ -> Read(\"lqr_mean_filter_window_size\",10);\n control_logic_config_.lqr_mean_filter_window_size_ = lqr_mean_filter_window_size_;\n ///LQR最大迭代次数\n lqr_max_iteration_ = vehicle_config_file_ -> Read(\"lqr_max_iteration\",150);\n control_logic_config_.lqr_max_iteration_ = 
lqr_max_iteration_;\n ///LQR横向最大加速度\n lqr_max_lateral_acceleration_ = vehicle_config_file_ -> Read(\"lqr_max_lateral_acceleration\",5.0);\n control_logic_config_.lqr_max_lateral_acceleration_ = lqr_max_lateral_acceleration_;\n ///最小速度保护\n lqr_minimum_speed_protection_ = vehicle_config_file_ -> Read(\"lqr_minimum_speed_protection\",0.1);\n control_logic_config_.lqr_minimum_speed_protection_ = lqr_minimum_speed_protection_;\n ///\n lqr_cutoff_freq_ = vehicle_config_file_ -> Read(\"lqr_cutoff_freq\",10);\n control_logic_config_.lqr_cutoff_freq_ = lqr_cutoff_freq_;\n ///\n lqr_mean_filter_window_size_ = vehicle_config_file_ -> Read(\"lqr_mean_filter_window_size\",10);\n control_logic_config_.lqr_mean_filter_window_size_ = lqr_mean_filter_window_size_;\n// ///横向误差调节器 避免误差过大的时候有较大调节\n// std::vector<Scheduler> lqr_lat_err_scheduler_init_;\n// ///航向角误差调节器 避免误差过大的时候有较大调节\n// std::vector<Scheduler> lqr_heading_err_scheduler_init_;\n// control_logic_config_.lqr_lat_err_scheduler_init_.assign(lqr_lat_err_scheduler_init_.begin(),lqr_lat_err_scheduler_init_.end());\n// control_logic_config_.lqr_heading_err_scheduler_init_.assign(lqr_heading_err_scheduler_init_.begin(),lqr_heading_err_scheduler_init_.end());\n}\n\n}\n}\n"
},
{
"alpha_fraction": 0.6776232719421387,
"alphanum_fraction": 0.6915296912193298,
"avg_line_length": 20.97222137451172,
"blob_id": "caf4a875d2c3380ee7a60902857f7b3afdbae6b0",
"content_id": "1b945dda664713ef53cf5f20e6a5446cbefb4f0e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 791,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 36,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/control_logic_debug_output.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file control_logic_debug_output.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROL_LOGIC_CONTROL_LOGIC_DEBUG_OUTPUT_H_\n#define CONTROL_LOGIC_CONTROL_LOGIC_DEBUG_OUTPUT_H_\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class ControlLogicDebugOutput\n *\n * @brief ControlLogicDebugOutput.\n */\nclass ControlLogicDebugOutput{\npublic:\n ControlLogicDebugOutput() = default;\n ~ControlLogicDebugOutput() = default;\n\n double lon_controller_tar_speed_;\n ChassisDetail chassis_detail_output_;\n Localization localization_output_;\n};\n}//namespace control\n}//namespace athena\n#endif // CONTROL_LOGIC_CONTROL_LOGIC_DEBUG_OUTPUT_H_\n"
},
{
"alpha_fraction": 0.6908548474311829,
"alphanum_fraction": 0.7017892599105835,
"avg_line_length": 24.149999618530273,
"blob_id": "e14d14c5b8dd52052c02c55ca727fb728787385c",
"content_id": "365b0d59952323f7f989753ae40796d6194bc041",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1034,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 40,
"path": "/athena/core/x86/Control/include/common/map_matching/coordinate_transformation.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file coordinate_transformation.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n\n#ifndef MAP_MATCHING_COORDINATE_TRANSFORMATION_H_\n#define MAP_MATCHING_COORDINATE_TRANSFORMATION_H_\n\n#include \"../../localization.h\"\n#include \"../../local_localization.h\"\n#include \"../../controller_config.h\"\n\nusing namespace athena::control;\n\nusing namespace athena::control;\n\nclass CoordinateTransformation{\npublic:\n CoordinateTransformation() = default;\n ~CoordinateTransformation() = default;\n\n\n/**\n * @brief coordinate_transformation.\n * @param[in] localization 定位信息输入.\n * @param[out] local_localization 定位转换信息输入.\n * @return void.\n */\n void coordinate_transformation(const Localization * localiation,\n LocalLocalization & local_localization,const ControllerConfig * controller_config);\nprivate:\n double HeadingSpeedFilterDirect(double heading_speed);\n};\n\n#endif // MAP_MATCHING_COORDINATE_TRANSFORMATION_H_\n"
},
{
"alpha_fraction": 0.6434305906295776,
"alphanum_fraction": 0.6542001366615295,
"avg_line_length": 31.119497299194336,
"blob_id": "9275675070c2b4cfd7a49a07743c92e4018eaf9f",
"content_id": "0fee7b6c26d73031c194976ade194c12e149b1cc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5853,
"license_type": "no_license",
"max_line_length": 141,
"num_lines": 159,
"path": "/athena/core/x86/Camera/vision_ssd_detect/include/ssd_detection/kf_tracker.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <opencv2/core/core.hpp>\n#include <opencv2/objdetect/objdetect.hpp>\n#include <opencv2/highgui/highgui.hpp>\n#include <opencv2/video/tracking.hpp>\n#include <opencv2/calib3d/calib3d.hpp>\n#include <opencv2/core/version.hpp>\n#include <opencv2/features2d/features2d.hpp>\n#include \"opencv2/imgproc/imgproc.hpp\"\n#include \"ssd_detection/camera_obj_list.hpp\"\n#include <iostream>\n#include <stdio.h>\n#include <sstream>\n#include <algorithm>\n#include <iterator>\n#define SSTR( x ) dynamic_cast< std::ostringstream & >( \\\n ( std::ostringstream() << std::dec << x ) ).str()\n\nusing namespace cv;\n\nstruct ObjectDetection_\n{\n\tcv::Rect rect;\n\tfloat score;\n\tint classification;\n\tint classID;\n};\n\nstatic int \t\t\tDEFAULT_LIFESPAN; //LIFESPAN of objects before stop being tracked, in frames\nstatic int\t \t\tINITIAL_LIFESPAN; //LIFESPAN of objects before stop being tracked, in frames\nstatic int\t\t\tORB_NUM_FEATURES;\nstatic unsigned int\tORB_MIN_MATCHES;\nstatic float\t\tORB_KNN_RATIO;\nstatic float \t\tNOISE_COV;\nstatic float \t\tMEAS_NOISE_COV;\nstatic float \t\tERROR_ESTIMATE_COV;\nstatic float \t\tOVERLAPPING_PERC;\nstatic bool \t\tSHOW_PREDICTIONS;\nstatic bool \t\tUSE_ORB;\nstatic bool \t\ttrack_ready_;\nstatic bool \t\tdetect_ready_;\n\nstruct kstate\n{\n\tcv::KalmanFilter\tKF;//KalmanFilter for this object\n\tcv::Rect pos;//position of the object centerx, centery, width, height\n cv::Scalar\tcolor;\t//ObjectDetection_ obj;//currently not used\n cv::Mat\t\t\timage;//image containing the detected and tracked object\n\tint obj_type;\n\tfloat\t\t\tscore;// score\n\tbool\t\t\tactive;//if too old (lifespan) don't use\n\tunsigned int\t\tid;//id of this tracked object\n\tint\t\t\tlifespan;//remaining lifespan before deprecate\n int\t\treal_data;\n\t//std::vector<KeyPoint> orbKeypoints;\n\t//cv::Mat\t\t\t\torbDescriptors;\n};\n\nclass Kftracker\n{\n\n public:\n /// 初始化跟踪参数\n void init_params();\n /** 运行kf跟踪\n * @param p1 输入检测后的Mat类型图片\n * 
@return 0\n **/\n int kf_run(cv::Mat &image);\n /// 坐标点转化为矩形框\n void getRectFromPoints(std::vector< cv::Point2f > corners, cv::Rect& outBoundingBox);\n /** 判断 roi是否存在包含关系\n * @param p1 BoundingBox from Detection\n * @param p2 BoundingBox from Predictor\n * @return true 如果b包含a 或者相反\n **/\n bool crossCorr(cv::Mat im1, cv::Mat im2);\n /** 预测框转化为Bbox\n * @param p1 跟新的bbox\n * @param p2 被预测跟踪后的bbox\n * @return true 如果b包含a 或者相反\n **/\n void posScaleToBbox(std::vector<kstate> kstates, std::vector<kstate>& trackedDetections);\n /** 获取新跟踪目标的id\n * @param 跟踪目标列表\n * @return 新目标的id\n **/\n int getAvailableIndex(std::vector<kstate>& kstates);\n /** 初始化卡尔曼跟踪器\n * @param p1 输入前帧检测目标列表\n * @param p2 在跟踪目标列表\n * @param p3 输入当前帧目标列表\n * @param p4 输入检测图片\n **/\n void initTracking(ObjectDetection_ object, std::vector<kstate>& kstates,ObjectDetection_ detection,cv::Mat& image);\n /** 检查上帧id是否已删除\n * @param p1 移除索引id列表\n * @param p2 匹配id\n * @return\n **/\n bool isInRemoved(std::vector<unsigned int> removedIndices, unsigned int index);\n /** 检查上帧id是否已删除\n * @param a 输入前帧检测目标列表\n * @param b 在跟踪目标列表\n **/\n void removeUnusedObjects(std::vector<kstate>& states);\n /** 检查id是否被占用\n * @param p1 检查id\n * @param p2 已有id列表\n **/\n bool alreadyMatched(int check_index, std::vector<int>& matched_indices);\n /** 基于置信度由高到低排序\n * @param p1 输入检测后置信度\n * @param p2 相对应分数索引号\n **/\n void Sort(const std::vector<float> in_scores, std::vector<unsigned int>& in_out_indices);\n /** 滤除重叠目标框,保留最大框 非极大值抑制\n * @param p1 跟踪后预测目标\n * @param p2 最低非极大值抑制值\n **/\n void ApplyNonMaximumSuppresion(std::vector< kstate >& in_source, float in_nms_threshold);\n /**跟踪及匹配id\n * @param p1 输入检测后的目标框\n * @param p2 帧计数\n * @param p3 跟新目标\n * @param p4 跟踪积极目标状态\n * @param p5 输入检测后图片\n * @param p6 trackedDetections 被跟踪的目标\n **/\n void doTracking(std::vector<ObjectDetection_>& detections, int frameNumber,\n std::vector<kstate>& kstates, std::vector<bool>& active, cv::Mat& image, std::vector<kstate>& trackedDetections);\n ///\n void 
publish_if_possible();\n /** 显示出跟踪预测后的目标框\n * @param p1 输入检测图片\n * @param p2 帧计数\n * @param p3 检测后目标\n * @param p4 跟踪预测后的目标\n * @param p5 跟踪目标的状态\n **/\n void trackAndDrawObjects(cv::Mat& image, int frameNumber, std::vector<ObjectDetection_> detections,\n std::vector<kstate>& kstates, std::vector<bool>& active);\n /// 输入检测图片的回调函数\n void image_callback(cv::Mat &imageTrack);\n /// 检测后跟踪回调函数\n void detections_callback(std::vector<camera_obj> &get_camera_objs);\n /** 匹配相交面积比IOU\n * @param p1 BoundingBox\n * @param p2 BoundingBox\n * @return 相交面积比IOU\n **/\n float bboxOverlap(cv::Rect &box1,cv::Rect &box2);\n /** 两个Bounding box欧式距离\n * @param p1 BoundingBox\n * @param p2 BoundingBox\n * @return 欧式距离\n **/\n float bboxDistance(cv::Rect &box1,cv::Rect &box2);\n // bool orbMatch(cv::Mat& inImageScene, cv::Mat& inImageObj, cv::Rect& outBoundingBox, unsigned int inMinMatches=2, float inKnnRatio=0.7);\n};\n"
},
{
"alpha_fraction": 0.5584862232208252,
"alphanum_fraction": 0.5768348574638367,
"avg_line_length": 17.16666603088379,
"blob_id": "12625b190156694ab85fd265c135c21f267923f9",
"content_id": "6a9dbbf06f523083f13a327d25c3c30064354044",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1102,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 48,
"path": "/athena/core/x86/Camera/vision_ssd_detect/include/camera_obj_list.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include <iostream>\n#include <opencv2/core/core.hpp>\n#include <opencv2/imgproc/imgproc.hpp>\n#include <opencv2/highgui/highgui.hpp>\n\nusing namespace std;\n\n///图像目标(单个)属性\nclass camera_obj\n{\npublic:\n ///目标id,与连续跟踪有关\n int id;\n /// 目标稳定性 0;未知 1;稳定 2: 不稳定目标\n int stability;\n ///行人/车辆/ car :7 person: 15 bus: 6\n int classification;\n /// 目标检测跟踪状态 1;未跟踪 2: acc障碍物\n int detection_status;\n ///横向位置\n double lat_pos;\n ///横向速度\n double lat_rate;\n ///纵向位置\n double lon_pos;\n ///纵向速度\n double lon_rate;\n ///宽度,单位:m\n double width;\n ///检测目标的置信度\n float score;\n /// 目标矩形框像素\n cv::Rect box_point;\n\n};\n\n///图像目标(数组)列表\nclass camera_objs_list\n{\npublic:\n /// 目标框数目\n int32_t num_of_camera_obj;\n /// 预留位\n int32_t reserve;\n /// 目标障碍物列表\n std::vector< camera_obj > cam_objs;\n};\n"
},
{
"alpha_fraction": 0.7111111283302307,
"alphanum_fraction": 0.7111111283302307,
"avg_line_length": 43.83333206176758,
"blob_id": "f44ba7c973be0b46ed1ec8dc83272eb83597ebea",
"content_id": "9eaa35ce7157fb4549b4e227524668bacd76997c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 298,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 6,
"path": "/athena/install_arm.sh",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#将动态库文件拷贝到系统目录下\nsudo -S cp core/arm/Map/lib/*.so /usr/local/lib\nsudo -S cp core/arm/Navi/lib/*.so /usr/local/lib\nsudo -S cp core/arm/Planning/lib/*.so /usr/local/lib\nsudo -S cp core/arm/Control/lib/*.so /usr/local/lib\nsudo -S cp core/arm/Common/lib/*.so /usr/local/lib\n\n"
},
{
"alpha_fraction": 0.7395274043083191,
"alphanum_fraction": 0.7545650005340576,
"avg_line_length": 21.530864715576172,
"blob_id": "1c038205844bc660a9c22e411c546668b12c9c4e",
"content_id": "8b5a64cf7e559fe049ede64794cf37093618b660",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2954,
"license_type": "no_license",
"max_line_length": 198,
"num_lines": 81,
"path": "/athena/examples/LCM/Singlecar/control/文档说明.md",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "## 项目介绍: \nAthena软件套件控制层软件命名为control,与旧版本controller相比有以下区别: \n\n1. 软件框架优化,control软件分为三层: 应用层,逻辑层以及控制器\n\n * 应用层主要功能包括openGL显示. 消息收发,调试. 日志\n\n * 逻辑层主要是刹车/档位/油门/驾驶模式/EPB等的逻辑处理\n\n * 控制器为横纵向控制算法\n\n2. 消息更改,接收规划层bcm消息更改,控制命令信息更改\n\n3. 注释符合doxygen规范\n\n4. 代码风格为Google style\n\n## 框架图: \n\n\n\n## 模块介绍: \n\n### 消息收发模块: \n\n1. 接收消息: ins_info(导航数据). ChassisDetail(底盘反馈消息). mt_info_report(规划下发路径消息). mt_bcm_control_info(规划下发BCM消息)\n\n2. 发送消息: control_cmd(车辆控制指令). bcm_control_info(车辆BCM控制指令). control_info_report(control回馈给motion消息). emergency(紧急事件指令)\n\n3. 工程目录: control/Sources/apps/lcm/lcm_message_manger.cpp\n\n4. 功能实现: 接收motin消息. 车辆底层状态回馈消息和惯导定位消息,发送motion需要的控制回馈和车辆相关控制指令\n\n5. 函数接口: 接口为LCM标准接口\n\n### 横向算法模块: \n\n1. 功能实现: 使用规划下发的路径消息. 车辆底层反馈的底盘消息和定位消息,通过计算输出方向盘转角给逻辑层\n\n2. 工程目录: /controller_lib/Sources/lat_controller/lat_controller.cpp\n\n3. 函数接口: void LatController::LateralHybridControl(path * local_path, uint32_t match_point_no,const LocalLocalization * local_localiation,const Chassis *chassis,ControllerOutput * controller_output)\n\n### 纵向算法: \n\n1. 功能实现: 使用规划下发的路径消息. 车辆底层反馈的底盘消息和定位消息,通过计算输出油门和刹车. 同时输出各部分自动驾驶使能状态和Epb的使能给逻辑层;\n\n2. 工程目录: /controller_lib/Sources/lon_controller/lon_controller.cpp\n\n3. 函数接口: bool LonController::ComputeControlOutput(\npath * path,\nuint32_t match_point_no,\nconst LocalLocalization * local_localiation,\nconst Chassis *chassis,ControllerOutput * controller_output\n)\n\n \n### 驾驶模式设置模块: \n\n1. 功能实现: 根据纵向算法输出设置车辆各子执行器的驾驶模式,同时根据底层状态反馈判断是否可以执行该模式;\n\n2. 工程目录: control/Sources/control_logic/control_logic.cpp\n\n3. 函数接口: void ControlLogic::SetDrivingMode(int driving_mode)\n\n\n### GUI显示模块: \n\n1. 功能实现: 依据惯导. 地图消息和车辆信息绘制简化图形车辆模型和车道信息\n\n2. 工程目录: control/Sources/control_view/control_view.cpp\n\n3. 函数接口: void ControlView::myDisplay(void)\n\n### 调试模块: \n\n1. 功能实现: 读取配置文件信息,进行在线调试\n\n2. 工程目录: control/Sources/control.cpp\n\n3. 函数接口: void Control::ReadConfigFile()\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.6885125041007996,
"alphanum_fraction": 0.6885125041007996,
"avg_line_length": 23.25,
"blob_id": "e497862277a3246a69a829eff7e58da8ee360986",
"content_id": "67d3111aaaba5e024ff4a55fc98c29f1b21f4be6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1358,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 56,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/globalVal.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _GLOABALVAL_INCLUDED\n#define _GLOABALVAL_INCLUDED\n\n#include <string>\n#include <lcm/lcm.h>\n#include <lcm/lcm-cpp.hpp>\n#include \"../sensor_lcm/cam_obj_list.hpp\"\n#include \"../lane_lcm/ins_info.hpp\"\n\nusing namespace std;\n\nclass Handler\n{\npublic:\n ~Handler() {}\n\n void handleMessage(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const sensor_lcm::cam_obj_list* msg);\n\n void handleInsMessage(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::ins_info* msg);\n};\n\nextern string lcm_url;\nextern string camera_ini;\nextern string local_photo_path;\nextern int run_mode;\nextern int write_file;\nextern int calibration;\nextern int showRoadImage;\nextern int wait_key;\nextern float m_per_pix_i;\nextern float m_per_pix_j;\nextern double basler_brightness;\nextern float pt_brightness;\nextern float pt_shutter;\nextern float pt_gain;\nextern float pt_gamma;\nextern float pt_white_balanceA;\nextern float pt_white_balanceB;\n\nextern lcm::LCM *g_lcm;\n\nextern double start_fps_time, end_fps_time;\n\nextern bool draw_detect_flag;\nextern sensor_lcm::cam_obj_list cam_points;\nextern obu_lcm::ins_info cur_ins_info;\n\nextern Handler handler;\nextern void* Thread_lcm_Function(void* param);\n\nextern long getCurrentTime();\n#endif\n"
},
{
"alpha_fraction": 0.49889299273490906,
"alphanum_fraction": 0.507749080657959,
"avg_line_length": 21.966102600097656,
"blob_id": "ebab2eb150050bc6da2338b2b1fcf78d142d8334",
"content_id": "c5420daecba370ff903a399aba66143a204609f1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1435,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 59,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/msg/ne_msg/ne_lcm.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef __NE_lcm_hpp__\n#define __NE_lcm_hpp__\n\n\n#include <lcm/lcm-cpp.hpp>\n#include \"ne_msg_t.hpp\"\n\n#include <iostream>\n\nclass NE_LCM: public lcm::LCM\n{\npublic:\n\n NE_LCM(std::string lcm_url) : lcm::LCM (lcm_url)\n {\n }\n\n //网元间消息用publish_nemsg发送\n //网元内消息用原始接口publish发送\n template <class T>\n int publish_nemsg(T &msg)\n {\n msg.encode_body();\n string tem(\"NEMSG_\");\n tem += stoupper(msg.header.local_ne_name);\n int ret = publish(tem, &msg);\n if (ret != 0 || msg.data_len > 8 * 1024)\n {\n printf(\"!!!! publish_nemsg(%s)->%s: body_len=%d, ret=%d\\n\",\n msg.header.peer_channel.c_str(), msg.header.peer_ne_name.c_str(), msg.data_len, ret);\n }\n return ret;\n };\n\n int async_handle(struct timeval tv)\n {\n int ret = 0;\n int lcm_fd = getFileno();\n fd_set readfds;\n FD_ZERO(&readfds);\n FD_SET(lcm_fd, &readfds);\n int status = 0;\n status = select (lcm_fd+1, &readfds, NULL, NULL, &tv);\n if (status < 0) //select出错的情况\n {\n std::cout<< \"ERROR! select failed\\n\"<< std::endl;\n return ret;\n }\n\n //status == 0表示超时,status > 0表示有文件描述符已经ready\n if (FD_ISSET (lcm_fd,&readfds))\n {\n ret = handle();\n }\n return ret;\n }\n};\n\n#endif\n"
},
{
"alpha_fraction": 0.40525394678115845,
"alphanum_fraction": 0.42136603593826294,
"avg_line_length": 21.296875,
"blob_id": "200f6a3fcdb97d14954976f23e8d540e0610b815",
"content_id": "007b03ef1ad34c257869d0d699860e041387d7fe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 7910,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 256,
"path": "/Doxygen.cc",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "基于Doxygen的C/C++注释原则\n标注总述\n1.文件头标注\n2. 命名空间标注\n3. 类、结构、枚举标注\n4. 函数注释原则\n5. 变量注释\n6. 模块标注\n7. 分组标注\n\n总述\n华丽的分隔线\n//---------------------------------------------------------------------------\n// Platform Defines\n//---------------------------------------------------------------------------\nenum\n{\n OST_PLATFORM_WIN32 = 1,\n OST_PLATFORM_LINUX_X86 = 2,\n OST_PLATFORM_LINUX_ARM = 3,\n OST_PLATFORM_ANDROID = 4,\n OST_PLATFORM_MACOSX = 5,\n};\n\n//---------------------------------------------------------------------------\n// API Export/Import Macros\n//---------------------------------------------------------------------------\n/** Indicates an exported and imported shared library function. */ \n#define OST_API_EXPORT __declspec(dllexport)\n#define OST_API_IMPORT __declspec(dllimport)\n\n//---------------------------------------------------------------------------\n// Digital Image Macros\n//---------------------------------------------------------------------------\n#define OST_PI 3.141592653589793f\n#define OST_RGB2GRAY(r, g, b) ( ((b) * 117 + (g) * 601 + (r) * 306) >> 10 )\n\n//---------------------------------------------------------------------------\n// date and time at compile time\n//---------------------------------------------------------------------------\n\n#define OST_TIMESTAMP __DATE__ \" \" __TIME__\n1. 文件头的标注\n/**\n\n* @file filename\n\n* @brief This is a brief description.\n\n* @details This is the detail description.\n\n* @author author\n\n* @date date\n\n* @version A001\n\n* @par Copyright (c):\n\n* XXX公司\n\n* @par History: \n\n* version: author, date, desc\\n\n\n*/\n2.命名空间\n /**\n * @brief 命名空间的简单概述 \\n(换行)\n * 命名空间的详细概述\n */\n namespace OST\n {\n }\n\n3. 
类、结构、枚举标注\n /**\n * @brief 类的简单概述 \\n(换行)\n * 类的详细概述\n */\n class Example\n {\n };\n \n 枚举类型定义、结构体类型定义注释风格类似\n /** \n * @brief 简要说明文字 \n */\n typedef struct 结构体名字\n {\n 成员1, /*!< 简要说明文字 */ or ///<说明, /**<说明 */ 如果不加<,则会认为是成员2的注释\n 成员2, /*!< 简要说明文字 */ or ///<说明, /**<说明 */ \n 成员3, /*!< 简要说明文字 */ or ///<说明, /**<说明 */ \n }结构体别名;\n\n4. 函数注释原则\n /** \n * @brief 函数简要说明-测试函数\n * @param index 参数1\n * @param t 参数2 @see CTest\n *\n * @return 返回说明\n * -<em>false</em> fail\n * -<em>true</em> succeed\n */\n bool Test(int index, const CTest& t);\n \n note:指定函数注意项事或重要的注解指令操作符\n note格式如下:\n @note 简要说明\n\n retval:指定函数返回值说明指令操作符。(注:更前面的return有点不同.这里是返回值说明)\n retval格式如下:\n @retval 返回值 简要说明\n \n pre:指定函数前置条件指令操作符\n pre格式如下:\n @pre 简要说明\n \n par:指定扩展性说明指令操作符讲。(它一般跟code、endcode一起使用 )\n par格式如下:\n @par 扩展名字\n \n code、endcode:指定\n code、endcode格式如下:\n @code\n 简要说明(内容)\n @endcode\n\n see:指定参考信息。\n see格式如下:\n @see 简要参考内容\n \n deprecated:指定函数过时指令操作符。\n deprecated格式如下:\n @deprecated 简要说明 \n\n 调试Bug说明\n 解决的bug说明,@bug\n 警告说明 (warning)\n 定义一些关于这个函数必须知道的事情,@warning\n 备注说明 (remarks)\n 定义一些关于这个函数的备注信息,@remarks\n 将要完成的工作 (todo)\n 说明哪些事情将在不久以后完成,@todo\n 使用例子说明 (example)\n 例子说明,@example example.cpp\n\n/**\n * @brief 打开文件 \\n\n * 文件打开成功后,必须使用::CloseFile函数关闭\n * @param[in] fileName 文件名\n * @param[in] fileMode 文件模式,可以由以下几个模块组合而成:\n * -r读取\n * -w 可写\n * -a 添加\n * -t 文本模式(不能与b联用)\n * -b 二进制模式(不能与t联用)\n * @return 返回文件编号\n * --1表示打开文件失败(生成时:.-1)\n * @note文件打开成功后,必须使用::CloseFile函数关闭\n * @par 示例:\n * @code\n * //用文本只读方式打开文件\n * int ret = OpenFile(\"test.txt\", \"a\");\n * @endcode\n * @see 函数::ReadFile::CloseFile (“::”是指定有连接功能,可以看文档里的CloseFile变成绿,点击它可以跳转到CloseFile.)\n * @deprecated由于特殊的原因,这个函数可能会在将来的版本中取消\n */\n int OpenFile(const char* fileName, const char* fileMode);\n \n /**\n * @brief 关闭文件\n * @param [in] file 文件\n *\n * @retval 0 成功\n * @retval -1 失败\n * @pre file 必须使用OpenFile的返回值\n */ \n int CloseFile(int file);\n \n -:生成一个黑心圆.\n -#:指定按顺序标记。\n :::指定连接函数功能。(注:空格和“:”有连接功能,但建议还是使用”::”。只对函数有用。)\n 它们格式如下: 
(-和::例子前面有了,就介绍-#例子。)\n - 简要说明\n -# 简要说明\n ::函数名\n 例:\n /**\n * @param [in] person 只能输入以下参数:\n * -# a:代表张三 // 生成 1. a:代表张三\n * -# b:代表李四 // 生成 2. b:代表李四\n * -# c:代表王二 // 生成 3. c:代表王二\n */\n void GetPerson(int p);\n \n5. 变量注释\n /// 简述\n /** 详细描述. */\n 或者\n //! 简述\n //! 详细描述\n //! 从这里开始\n int m_variable_1; ///< 成员变量m_variable_1说明\n int m_variable_2; ///< 成员变量m_variable_1说明\n \n /**\n * @brief 成员变量m_c简要说明\n *\n * 成员变量m_variable_3的详细说明,这里可以对变量进行\n * 详细的说明和描述,具体方法和函数的标注是一样的\n */\n bool m_variable_3;\n 如果变量需要详细说明的可已按照m_varibale_3的写法写,注意,m_variable_2和m_variable_3之间一定需要空行,否则会导致m_variable_2的简述消失\n \n6. 模块标注\n 模块定义格式:\n /**\n * @defgroup 模块名 页的标题名 (模块名只能英文,这个可以随便取.在一个源文件里不能相同)\n * @{ (跟c语言{一样起作用域功能)\n */\n … 定义的内容 …\n /** @} */\n \n 例:\n /**\n * @defgroup HenryWen Example.cpp\n * @{\n */\n … 定义的内容 …\n /** @} */\n \n7. 分组标注\n 分组定义格式:\n /**\n * @name 分组说明文字\n * @{\n */\n … 定义的内容 …\n /** @} */\n \n 例:\n /**\n * @name PI常量\n * @{\n */\n #define PI 3.1415926737\n /** @} */\n \n /**\n * @name 数组固定长度常量\n * @{\n */\n const int g_ARRAY_MAX = 1024;\n /** @} */\n\n\n"
},
{
"alpha_fraction": 0.5849126577377319,
"alphanum_fraction": 0.6053511500358582,
"avg_line_length": 20.70161247253418,
"blob_id": "7279002c784898bdc696163c200a00b80ae10d43",
"content_id": "130594680a64aa6dcdd945d53479fff7b93fee2a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3407,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 124,
"path": "/athena/core/arm/Map/include/MapData.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n* @file MapData.h\n* @brief 地图数据类型定义\n* @details 定义地图中的红绿灯、限速牌、施工标志、路口、车道\n* @author huanhuan\n* @date 2018/7/16\n* @version v1.0\n* @par Copyright (c):\n* 武汉环宇智行科技有限公司\n* @par History:\n* version: author, date, desc\\n\n*/\n#ifndef _MAP_DATA_H\n#define _MAP_DATA_H\n\n\n#include\"LaneletMap.hpp\"\n//使用glog应该包含的头文件\n#include <glog/logging.h>\n#include <glog/raw_logging.h> //支持线程安全\n//支持boost\n#include <boost/filesystem.hpp>\n#include <boost/tokenizer.hpp>\n#include <boost/lexical_cast.hpp>\n#include <boost/algorithm/string.hpp>\n\nusing namespace LLet;\n\nnamespace athena\n{\nnamespace roadmap\n{\n#define NEAREST_LANE_LIST 30.0\ntypedef LaneletMap OSMMap;\n\nclass map_point\n{\npublic:\n double lon; //经度\n double lat; //纬度\n int64_t id; //在OSM地图里面的id\n};\n\n\n\n//限速牌\nclass limspeed\n{\npublic:\n std::string limspeed_id; //地图没有限速牌,id=lon|lat\n int32_t active_index; //车辆从center_line[active_index]开始按limspeed_value速度行驶\n int32_t limspeed_value; //限速值,单位:km/h\n int16_t alarm_flag; //告警标志\n\n limspeed()\n {\n active_index = -1;\n limspeed_value = 20.0;\n alarm_flag = 0;\n }\n};\n\n//施工标志\nclass block\n{\npublic:\n std::string block_id; //地图没有施工标志,id=lon|lat\n int32_t stop_index; //车辆停止在center_line[stop_index]位置\n int32_t block_value; //限速值,单位:km/h\n int32_t lane_count; //车道数量\n int16_t alarm_flag; //告警标志\n\n block()\n {\n stop_index = -1;\n block_value = 0;\n lane_count = 2;\n alarm_flag = 0;\n }\n\n //输入当前车道,lane_index=左数车道,第一道是1\n //返回规避方法: CL_DIRECTION_NONE=不阻塞,LEFT=向左换道规划,RIGHT=向右换道规划,BLOCK=堵死\n int check(int lane_index,int op_lane_size = -1);\n};\n\nclass light\n{\npublic:\n int64_t id;\n double mileage; //mileage: + m; active -> stop\n map_point stop_point;\n map_point exit_point;\n map_point light_point;\n};\n\nclass cross_regulator\n{\npublic:\n std::string name;//路口名称\n int type; //路口类型\n std::vector<light> flow_light_list_; //flow_light_list 车流的红绿灯\n //vector<map_point> points_; //车流里的与红绿灯相关的点\n};\n\n/**\n* @brief 车道描述\n* 
包含每个车道的id、当前的兄弟车道、对向的兄弟车道、可换道的车道、车道长度、最大速度、道路等级\n*/\nclass lane\n{\npublic:\n int64_t lane_id; ///<当前lane的id\n std::vector<int64_t> cur_brother_lane; ///<当前车辆所在方向,所有兄弟车道,从左向右排列,车辆换道后修改lane_id。\n std::vector<int64_t> opp_brother_lane; ///<逆向兄弟车道,从左向右排列。\n std::vector<int64_t> change_lane; ///<当前车道所在的可换道的车道列表,从左向右排列,包含了可以借道的区域。\n double length; ///<车道长度,单位:m\n double max_speed; ///<最大限速,单位:km/h\n int32_t road_level; ///<道路等级\n\n lane():lane_id(-1), max_speed(60), road_level(0) {}\n};\n}\n}\n#endif // _MAP_DATA_H\n"
},
{
"alpha_fraction": 0.7873918414115906,
"alphanum_fraction": 0.8220024704933167,
"avg_line_length": 23.484848022460938,
"blob_id": "d8791e8533274ac56bdc1392481305c79c21546e",
"content_id": "e921c7586f2e2a3e6aa2337cc631bcace0911547",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1425,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 33,
"path": "/athena/examples/LCM/Singlecar/control/测试方法.md",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#测试方法\n##文件描述\n-- feedbacktest.m \nmatlab数据解析文件,在matlab中运行后生成轨迹图,误差贡献图,误差图\n解析每次运行生成的log文件 feedback_lateral+time.log\n-- looppath.m \nmatlab数据解析文件,在matlab中运行后生成looptest.txt的分析图\n解析looptest.txt轨迹文件,用于查看数据是否正常\n-- looptest.txt \n轨迹文件,使用map_logger录制得来,存放不同地方的轨迹数据,数据存放于 /log文件夹\n-- feedback_lateral2018_11_8 12_16_28.log eg\n测试log文件,每次运行control都会生成一个,文件名格式feedback_lateral年_月_日 时_分_秒.log\n-- /bin/map_logger\n轨迹录制文件,完成后改名为looptest.txt即可使用,对应的组播配置文件在controller_value.cfg\n\n##测试步骤\n###生成轨迹文件\n1.配置组播地址统一\n2.运行vehicle_interface在目标机TX2\n3.运行map_logger\n4.开始人工驾驶车辆\n5.结束map_logger,将生成的文件重命名为looptest位置.txt\n6.复制新文件到/log文件夹备份\n7.复制需要的轨迹文件到根目录,并重命名为looptest.txt\n\n###运行控制程序并分析\n1.配置组播地址统一\n2.运行vehicle_interface在目标机\n3.运行control\n4.开始自动驾驶\n5.结束自动驾驶,最好在轨迹跑完前结束\n6.新生成log文件 feedback_lateral年_月_日 时_分_秒.log,复制该文件名到feedbacktest.m 对应位置\n7.运行matlab开始分析\n\n"
},
{
"alpha_fraction": 0.35571688413619995,
"alphanum_fraction": 0.3666061758995056,
"avg_line_length": 21.040000915527344,
"blob_id": "083edcdf463e873ebe7f4a1fe7a23e1aafb27101",
"content_id": "26a43cff2371836d4dbb94b4ead618714f470126",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 551,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 25,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/lm_type.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "////////////////////////////////////////////////////////////////////////////////\n#ifndef\t_LM_TYPE_H_\n#define\t_LM_TYPE_H_\n////////////////////////////////////////////////////////////////////////////////\n// system header files\n\n////////////////////////////////////////////////////////////////////////////////\n// personal header files and macros\ntypedef enum {\n\tLM_UP = 0,\n\tLM_DOWN = 1\n} LM_UpDown;\n\ntypedef enum {\n\tLM_LEFT = 0,\n\tLM_RIGHT = 1\n} LM_LeftRight;\n\ntypedef enum {\n\tLM_POSITIVE = 0,\n\tLM_NEGATIVE = 1\n} LM_PositiveNegative;\n\n\n#endif\t_LM_TYPE_H_\n"
},
{
"alpha_fraction": 0.6746463775634766,
"alphanum_fraction": 0.6849836707115173,
"avg_line_length": 20.372093200683594,
"blob_id": "3938bc280741c4f68523d7f04f0d252d27d91b64",
"content_id": "dec9d990c20d74ca9a05fffcd669d9a2c2cebf68",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1932,
"license_type": "no_license",
"max_line_length": 180,
"num_lines": 86,
"path": "/athena/core/arm/Control/include/generic_controller.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file generic_controller.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n/**\n * @file\n * @brief Defines the GenericController class.\n */\n#ifndef GENERIC_CONTROLLER_H_\n#define GENERIC_CONTROLLER_H_\n\n#include \"common/path.h\"\n#include \"controller_config.h\"\n#include \"controller_output.h\"\n#include \"localization.h\"\n#include \"local_localization.h\"\n#include \"chassis.h\"\n#include \"debug_output.h\"\n\n\n/**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class LonController\n *\n * @brief Longitudinal controller, to compute brake and driving force values.\n */\nclass GenericController{\n public:\n\n /**\n * @brief constructor\n */\n GenericController() = default;\n\n /**\n * @brief destructor\n */\n ~GenericController() = default;\n\n /**\n * @brief init.\n * @param[in] controller_config controller config.\n * @return true or false.\n */\n virtual bool Init(const ControllerConfig controller_config) = 0;\n\n /**\n * @brief ComputeControlOutput.\n * @param[in] path 轨迹.\n * @param[in] match_point_no 轨迹匹配索引.\n * @param[in] localiation 定位信息.\n * @param[in] chassis 车辆底盘信息.\n * @param[in] ControllerOutput 控制器输出.\n * @return true or false.\n */\n virtual bool ComputeControlOutput(path * path,uint32_t match_point_no,const LocalLocalization * local_localiation,const Chassis *chassis,ControllerOutput * controller_output) = 0;\n\n /**\n * @brief 获取调试信息.\n * @param[in] debug_output 调试输出.\n * @return void.\n */\n virtual void GetControllerDebugInfo(DebugOutput &debug_output) = 0;\n\n /**\n * @brief SetTarSpeedDebug 设置推荐速度.\n * @param[in] tar_speed 推荐速度.\n * @param[in] valid 是否有效.\n * @return void.\n */\n virtual void SetTarSpeedDebug(int32_t tar_speed,bool valid) = 0;\n};\n}//namespace control\n}//namespace athena\n#endif //GENERIC_CONTROLLER_H_\n"
},
{
"alpha_fraction": 0.6952437162399292,
"alphanum_fraction": 0.701702892780304,
"avg_line_length": 27.86440658569336,
"blob_id": "23dcaa37ceaa7c8a730517e03d284b2b6a1b329a",
"content_id": "114e798b285a9baacc073554f18e776d949edbe0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1704,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 59,
"path": "/athena/core/x86/Map/include/Lanelet.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*\n * © 2014 by Philipp Bender <[email protected]>\n * \n * This file is part of libLanelet.\n *\n * libLanelet is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * libLanelet is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with libLanelet. If not, see <http://www.gnu.org/licenses/>.\n */\n\n#pragma once\n\n#include \"LaneletBase.hpp\"\n#include \"Attribute.hpp\"\n#include \"LineStrip.hpp\"\n#include \"lanelet_point.hpp\"\n#include \"RegulatoryElement.hpp\"\n#include \"LaneletFwd.hpp\"\n\n#include <tuple>\n#include <vector>\n\nnamespace LLet\n{\n\nclass Lanelet : public LaneletBase, public HasAttributes\n{\npublic:\n Lanelet( );\n Lanelet( int64_t id, const strip_ptr_t& left, const strip_ptr_t& right );\n\n\n virtual const std::tuple< strip_ptr_t, strip_ptr_t >& bounds() const;\n virtual const std::vector< regulatory_element_ptr_t >& regulatory_elements() const;\n std::vector< regulatory_element_ptr_t >& regulatory_elements();\n\n int64_t id() const;\n\n void add_regulatory_element( const regulatory_element_ptr_t& elem );\n\nprivate: \n int64_t _id; \n std::tuple< strip_ptr_t, strip_ptr_t > _bounds;\n std::vector< regulatory_element_ptr_t > _regulatory_elements;\n\n};\n\n}\n\nstd::ostream& operator<<( std::ostream& out, const LLet::lanelet_ptr_t& lanelet );\n"
},
{
"alpha_fraction": 0.6351230144500732,
"alphanum_fraction": 0.6378076076507568,
"avg_line_length": 38.55457305908203,
"blob_id": "45441d5c377b60b5e794c253f5f73735edb68958",
"content_id": "2aeb8aec386a4d8e8cf8297c93c8ef1ae3724ef3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 16275,
"license_type": "no_license",
"max_line_length": 160,
"num_lines": 339,
"path": "/athena/examples/LCM/Singlecar/planning/config.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "//Config.h\n#pragma once\n\n#include <string>\n#include <map>\n#include <iostream>\n#include <fstream>\n#include <sstream>\n\nextern std::string OBU_URL;\n\n///<档位参数\nextern int AT_STATUS_P; ///<档位信息,P档\nextern int AT_STATUS_R; ///<档位信息,R档\nextern int AT_STATUS_N; ///<档位信息,N档\nextern int AT_STATUS_D; ///<档位信息,D档\nextern int AT_STATUS_M; ///<档位信息,M档\n\n///<车型相关参数\nextern double WHEEL_BASE; ///<车轮轴长,单位:米\nextern double CAR_LENGTH; ///<车身长,单位:米\nextern double CAR_WIDTH; ///<车身宽,单位:米\nextern double CAR_HIGH; ///<车身高,单位:米\nextern double CAR_WEIGHT; ///<车身质量,单位kg\nextern double CAR_MIN_R; ///<车最小转弯半径,单位:米\nextern double MAX_STEERING_ANGLE; \t ///<最大方向盘转角\nextern double MIN_STEERING_ANGLE; ///<最小方向盘转角\nextern double STEERING_RATIO; ///<方向盘和车轮转角的比例关系\n\n///<泊车相关参数\nextern double SAFE_DISTANCE_PARK; ///<停车的安全保护距离(前后)\nextern double SAFE_WIDTH_PARK; ///<停车的安全保护宽度(两侧)\nextern double PARK_LANE_WIDTH; ///<泊车时泊车通道的道路宽度\nextern double H_MIN; ///<泊车时纵向距离H的最小值\nextern double H_MAX; ///<泊车时纵向距离H的最大值\nextern double H_STEP; ///<泊车时纵向距离H的取值步长\nextern double S_STEP; ///<泊车时横向距离S的取值步长\nextern double DELTA_S_MAX; ///<泊车时横向距离S向前搜索的范围\nextern double EXTENDED_LINE_OF_PARALLEL; ///<平行泊车时向前延展的距离\nextern double EXTENDED_LINE_OF_VERTICAL; ///<垂直泊车时向前延展的距离\nextern double PARK_SPEED; ///<泊车时的速度,单位:km/h\nextern double THRESHOLD_START_PARK; ///<车与车库中心的距离小于该阈值时,可以触发泊车\nextern double THRESHOLD_CAR_STATIC_SPEED; ///<本车车速小于该阈值可以认为车静止,单位:m/s\nextern int NUM_EXTEND_TRAJECTORY; ///<给控制发轨迹时,需要延长一些(点的个数)\n\n///<地图匹配参数\nextern int SOURCE_OF_CAR_SPEED;\t\t\t ///0:从惯导获得本车当前速度;1:从can信号获得本车当前速度\nextern int PRIOR_MATCH_LANE;\t\t\t ///<优先匹配车道,1=低速道,0=高速道\nextern double THRESHOLD_MATCH_CENTERS;\t\t ///<匹配多车道中心线的阈值(米)\nextern double GLOBAL_SEARCH_MATCH_CENTERS;\t ///<匹配车道中心线粗搜索时的限差范围(米)\nextern int NUM_BEFORE_MATCH_CENTERS;\t\t ///<匹配车道中心线时向前搜索的点数\nextern double THRESHOLD_MATCH_BIAS;\t\t ///<点匹配到中心线的距离过大,认为匹配失败(米)\nextern double COEF1_MOTION_WITHOUT_VP;\t\t 
///<没有virtual_path时(第一次规划目的地),计算MOTION_PLANNING_LENGTH的系数1(常数项)\nextern double COEF2_MOTION_WITHOUT_VP;\t\t ///<没有virtual_path时(第一次规划目的地),计算MOTION_PLANNING_LENGTH的系数2(横向偏差系数)\nextern double COEF1_AHEAD_WITHOUT_VP;\t\t ///<没有virtual_path时(第一次规划目的地),计算AHEAD_OF_MOTION_PLANNING的系数1(常数项)\nextern double COEF2_AHEAD_WITHOUT_VP;\t\t ///<没有virtual_path时(第一次规划目的地),计算AHEAD_OF_MOTION_PLANNING的系数2(速度系数)\nextern double COEF1_MOTION_WITH_VP;\t\t ///<有virtual_path时,计算MOTION_PLANNING_LENGTH的系数1(常数项)\nextern double COEF2_MOTION_WITH_VP;\t\t ///<有virtual_path时,计算MOTION_PLANNING_LENGTH的系数2(速度系数)\nextern double THRESHOLD_HIGH_SPEED;\t\t ///<速度高时用另一套系数\nextern double COEF1_MOTION_HIGH_SPEED;\t\t ///<高速时,计算MOTION_PLANNING_LENGTH的系数1(常数项)\nextern double COEF2_MOTION_HIGH_SPEED;\t\t ///<高速时,计算MOTION_PLANNING_LENGTH的系数2(速度系数)\nextern double COEF1_AHEAD_WITH_VP;\t\t ///<有virtual_path时,计算AHEAD_OF_MOTION_PLANNING的系数1(常数项)\nextern double COEF2_AHEAD_WITH_VP;\t\t ///<有virtual_path时,计算AHEAD_OF_MOTION_PLANNING的系数2(速度系数)\nextern double MIN_MOTION_LENGTH;\t\t\t ///<一次规划的最小长度(米)\nextern double MAX_MOTION_LENGTH;\t\t \t///<一次规划的最大长度(米)\nextern double MAX_MOTION_DELTA_HEADING;\t\t ///<一次规划的最大角度差(度)\nextern double INTERVAL_MAP_SECTION;\t\t ///<下发的地图切片点的间隔(米)\nextern double SPLINE_EVERY;\t\t\t ///<规划轨迹点间隔(米)\nextern double MAP_SPLINE_EVERY;\t\t\t ///<地图中心线点间隔(米)\nextern double MATCH_STOP_POINT_ERROR;\t\t ///<匹配停车点时点距离线的最小限差(米)\nextern int TRAFFIC_LIGHTS_CHECKS_LENGTH; ///<路口红绿灯停车点的检查距离(个)\nextern int BEFORE_LIGHTS; ///<路口提前停车距离 (个)\n\n///<障碍物\nextern int NUMBER_BACKWARD;\t\t\t ///<障碍物根据边界过滤时向后搜索的点数\nextern int NUMBER_FORWARD;\t\t\t ///<障碍物根据边界过滤时向前搜索的点数\nextern double PEDESTRIAN_WIDEN_DIS;\t\t ///<行人加宽距离范围(米)\nextern double PEDESTRIAN_WIDEN_ANG;\t\t ///<行人加宽角度范围(度)\nextern double PEDESTRIAN_WIDEN_WIDTH;\t\t ///<行人加宽的宽度(米)\nextern double CAR_LENGTHEN_LENGTH;\t\t ///<障碍车加长的长度(米)\nextern int OBSTACLE_COST_VALUE;\t\t ///<障碍物的代价值\nextern int B_READ_OBSTACLE_SPEED;\t\t 
///<0:不读取障碍物速度;1:读取障碍物速度\n\n///<碰撞检测\nextern int COL_CHECK_INTERVAL;\t\t\t ///<碰撞检测时,虚拟车道的搜索间隔(个)\nextern double THRESHOLD_DELTA_LENGTH;\t \t ///<虚拟车道搜索时,前后点间距超过一定的阈值,就调整搜索间隔(米)\nextern double THRESHOLD_STATIC_SPEED;\t\t ///<速度小于THRESHOLD_STATIC_SPEED m/s,认为是静态障碍物,需要停车或避障(m/s)\nextern double RATIO_SPEED_CAR_FOLLOWING; ///<障碍物速度小于本车速度该比例时,选择超车\nextern double CAR_FOLLOWING_SPEED_DIFF;\t\t ///<调整车速略小于跟随的障碍物速度(m/s)\nextern double SAFE_WIDTH; \t\t\t ///<安全保护的距离(米)\nextern double SAFE_LENGTH; \t\t\t ///<安全保护的距离(米)\nextern double FREE_LENGTH_DIFF;\t\t\t ///<如果车道都有碰撞,取碰撞距离更大的(米)\nextern int COUNT_COLLISION;\t\t\t ///<持续超过COUNT_COLLISION帧有碰撞才进行重规划,否则只是减速\n//# SLOW_DOWN_STEP 0.1 #减速的步长(程序中会重新计算)\nextern double COEF_SLOW_DOWN_STEP;\t\t ///<SLOW_DOWN_STEP 0.5 * motion_tar_speed / COUNT_COLLISION;\nextern double COEF_COL_CHECK_LENGTH;\t ///<决定碰撞检测长度:COLLISION_CHECK_LENGTH=COEF_COL_CHECK_LENGTH * ( MOTION_PLANNING_LENGTH + AHEAD_OF_MOTION_PLANNING );\nextern int COUNT_SLOW;\t\t\t\t ///<遇见障碍物减速后保持低速一段时间\n\n///<轨迹规划相关参数\nextern double COEF_UPDATE_TRAJECTORY; \t\t ///<更新轨迹的比例参数,决定走过多少里程更新一次轨迹\nextern int AHEAD_OF_MOTION_PLANNING;\t\t ///<单位(米)\nextern int MOTION_PLANNING_LENGTH;\t\t ///<单位(米)\nextern int AFTER_MOTION_PLANNING_NUM;\t\t ///<单位(个)\n\n///<轨迹生成【横向】\nextern double LAT_OFFSET; \t\t\t ///<用于生成轨迹范围(左右各LAT_OFFSET米)\nextern double COEF_LIMIT_LAT_STEP;\t\t ///<拨杆换道限制、障碍物减速过程或者经过路口时,横向偏移范围收缩的比例系数\nextern int NUM_TRAJ_CLUSTER;\t\t\t ///<生成轨迹簇的个数(2×NUM_TRAJ_CLUSTER+1)\nextern int NUM_BACKWARD_TRAJ_CHECK;\t\t ///<轨迹边界检测时向后搜索的点数(个)\nextern int NUM_FORWARD_TRAJ_CHECK;\t\t ///<轨迹边界检测时向前搜索的点数(个)\nextern int OUT_SIDE_INTERVAL;\t\t\t ///<判断轨迹是否出界时的搜索步长(个)\nextern int OUT_LINE_COST_VALUE;\t\t ///<出车道线的代价值\nextern int OUT_EDGE_COST_VALUE;\t\t ///<出道路边缘的代价值\nextern double COEF_END_POS;\t\t\t ///<综合计算代价值时,终点代价值的系数\nextern double COEF_COL_CHECK;\t\t\t ///<综合计算代价值时,碰撞代价值的系数\nextern double COEF_LEFT;\t\t\t\t ///<综合计算代价值时,左边线代价值的系数\nextern double COEF_RIGHT;\t\t\t ///<综合计算代价值时,右边线代价值的系数\nextern 
double COEF_KS;\t\t\t ///<综合计算代价值时,曲率代价值的系数\nextern double THRESHOLD_KS;\t\t\t ///<当曲率大于THRESHOLD_KS时,考虑曲率代价值\nextern int THRESHOLD_COST_VALUE;\t\t ///<当最优路径的代价值还大于THRESHOLD_COST_VALUE时,需要停车\n\n///<各种测试开关\nextern int MAP_LOG_SWITCH;\nextern int TRAJECTORY_LOG_SWITCH;\nextern int TRAFFIC_LIGHTS_LOG_SWITCH;\nextern int CHANGE_LANE_LOG_SWITCH;\nextern int OBSTACLES_LOG_SWITCH;\nextern int LONGITUDINAL_CONTROL_LOG_SWITCH;\nextern int MAP_MATCHING_LOG_SWITCH;\nextern int SELECT_VALUE_LOG_SWITCH;\nextern int VIRTUAL_PATH_LOG_SWITCH;\t ///<实时匹配虚拟轨迹的状态\n\nextern int TRAJECTORY_VIEW_SWITCH;\nextern int SPEED_PLAN_VIEW_SWITCH;\nextern int CHANGE_LANE_VIEW_SWITCH;\nextern int LATERAL_CONTROL_VIEW_SWITCH;\nextern int LONGITUDINAL_CONTROL_VIEW_SWITCH;\nextern int MAP_MATCHING_VIEW_SWITCH;\nextern int COLLISION_CHECK_VIEW_SWITCH;\nextern int PLANNING_VALUE_VIEW_SWITCH;\nextern int NEXTWORK_CHANGELANE_VIEW_SWITCH;\nextern int SELECT_VALUE_VIEW_SWITCH;\n\n///<轨迹生成【纵向】\nextern int STOP_LENGTH_TO_OBS;\t ///<停车位置距离障碍物的距离\nextern double COEF_KS_SPEED;\t\t ///<速度、曲率转换\nextern double MIN_SPEED;\t\t\t\t ///<最小速度(m/s)\nextern double MAX_ACCELERATION;\t\t\t ///<最大加速度\nextern double MAX_DECELERATION;\t\t\t ///<最大减速度\n\n///<origin point\nextern double ORIGIN_LAT;\t ///<坐标原点纬度 #shanghai\nextern double ORIGIN_LON; ///<坐标原点经度\n\nbool read_motion_plan_config_value_from_file();\nbool write_config_value_from_file();\n\n\n\n/************************************************************* Config ************************************************************************/\n/*\n* \\brief Generic configuration Class\n*\n*/\nclass Config {\n\t// Data\nprotected:\n\tstd::string m_Delimiter; //!< separator between key and value\n\tstd::string m_Comment; //!< separator between value and comments\n\tstd::map<std::string,std::string> m_Contents; //!< extracted keys and values\n\n\ttypedef std::map<std::string,std::string>::iterator mapi;\n\ttypedef std::map<std::string,std::string>::const_iterator mapci;\n\t// 
Methods\npublic:\n\n\tConfig( std::string filename,std::string delimiter = \"=\",std::string comment = \"#\" );\n\tConfig();\n\ttemplate<class T> T Read( const std::string& in_key ) const; //!<Search for key and read value or optional default value, call as read<T>\n\ttemplate<class T> T Read( const std::string& in_key, const T& in_value ) const;\n\ttemplate<class T> bool ReadInto( T& out_var, const std::string& in_key ) const;\n\ttemplate<class T>\n\tbool ReadInto( T& out_var, const std::string& in_key, const T& in_value ) const;\n\tbool FileExist(std::string filename);\n\tvoid ReadFile(std::string filename,std::string delimiter = \"=\",std::string comment = \"#\" );\n\n\t// Check whether key exists in configuration\n\tbool KeyExists( const std::string& in_key ) const;\n\n\t// Modify keys and values\n\ttemplate<class T> void Add( const std::string& in_key, const T& in_value );\n\tvoid Remove( const std::string& in_key );\n\n\t// Check or change configuration syntax\n\tstd::string GetDelimiter() const { return m_Delimiter; }\n\tstd::string GetComment() const { return m_Comment; }\n\tstd::string SetDelimiter( const std::string& in_s )\n\t{ std::string old = m_Delimiter; m_Delimiter = in_s; return old; }\n\tstd::string SetComment( const std::string& in_s )\n\t{ std::string old = m_Comment; m_Comment = in_s; return old; }\n\n\t// Write or read configuration\n\tfriend std::ostream& operator<<( std::ostream& os, const Config& cf );\n\tfriend std::istream& operator>>( std::istream& is, Config& cf );\n\nprotected:\n\ttemplate<class T> static std::string T_as_string( const T& t );\n\ttemplate<class T> static T string_as_T( const std::string& s );\n\tstatic void Trim( std::string& inout_s );\n\n\n\t// Exception types\npublic:\n\tstruct File_not_found {\n\t\tstd::string filename;\n\t\tFile_not_found( const std::string& filename_ = std::string() )\n\t\t\t: filename(filename_) {} };\n\t\tstruct Key_not_found { // thrown only by T read(key) variant of read()\n\t\t\tstd::string 
key;\n\t\t\tKey_not_found( const std::string& key_ = std::string() )\n\t\t\t\t: key(key_) {} };\n};\n\n\n/* static */\ntemplate<class T>\nstd::string Config::T_as_string( const T& t )\n{\n\t// Convert from a T to a string\n\t// Type T must support << operator\n\tstd::ostringstream ost;\n\tost << t;\n\treturn ost.str();\n}\n\n\n/* static */\ntemplate<class T>\nT Config::string_as_T( const std::string& s )\n{\n\t// Convert from a string to a T\n\t// Type T must support >> operator\n\tT t;\n\tstd::istringstream ist(s);\n\tist >> t;\n\treturn t;\n}\n\n\n/* static */\ntemplate<>\ninline std::string Config::string_as_T<std::string>( const std::string& s )\n{\n\t// Convert from a string to a string\n\t// In other words, do nothing\n\treturn s;\n}\n\n\n/* static */\ntemplate<>\ninline bool Config::string_as_T<bool>( const std::string& s )\n{\n\t// Convert from a string to a bool\n\t// Interpret \"false\", \"F\", \"no\", \"n\", \"0\" as false\n\t// Interpret \"true\", \"T\", \"yes\", \"y\", \"1\", \"-1\", or anything else as true\n\tbool b = true;\n\tstd::string sup = s;\n\tfor( std::string::iterator p = sup.begin(); p != sup.end(); ++p )\n\t\t*p = toupper(*p); // make string all caps\n\tif( sup==std::string(\"FALSE\") || sup==std::string(\"F\") ||\n\t\tsup==std::string(\"NO\") || sup==std::string(\"N\") ||\n\t\tsup==std::string(\"0\") || sup==std::string(\"NONE\") )\n\t\tb = false;\n\treturn b;\n}\n\n\ntemplate<class T>\nT Config::Read( const std::string& key ) const\n{\n\t// Read the value corresponding to key\n\tmapci p = m_Contents.find(key);\n\tif( p == m_Contents.end() ) throw Key_not_found(key);\n\treturn string_as_T<T>( p->second );\n}\n\n\ntemplate<class T>\nT Config::Read( const std::string& key, const T& value ) const\n{\n\t// Return the value corresponding to key or given default value\n\t// if key is not found\n\tmapci p = m_Contents.find(key);\n\tif( p == m_Contents.end() ) return value;\n\treturn string_as_T<T>( p->second );\n}\n\n\ntemplate<class T>\nbool 
Config::ReadInto( T& var, const std::string& key ) const\n{\n\t// Get the value corresponding to key and store in var\n\t// Return true if key is found\n\t// Otherwise leave var untouched\n\tmapci p = m_Contents.find(key);\n\tbool found = ( p != m_Contents.end() );\n\tif( found ) var = string_as_T<T>( p->second );\n\treturn found;\n}\n\n\ntemplate<class T>\nbool Config::ReadInto( T& var, const std::string& key, const T& value ) const\n{\n\t// Get the value corresponding to key and store in var\n\t// Return true if key is found\n\t// Otherwise set var to given default\n\tmapci p = m_Contents.find(key);\n\tbool found = ( p != m_Contents.end() );\n\tif( found )\n\t\tvar = string_as_T<T>( p->second );\n\telse\n\t\tvar = value;\n\treturn found;\n}\n\n\ntemplate<class T>\nvoid Config::Add( const std::string& in_key, const T& value )\n{\n\t// Add a key with given value\n\tstd::string v = T_as_string( value );\n\tstd::string key=in_key;\n\tTrim(key);\n\tTrim(v);\n\tm_Contents[key] = v;\n\treturn;\n}\n\n"
},
{
"alpha_fraction": 0.649092972278595,
"alphanum_fraction": 0.6564626097679138,
"avg_line_length": 19.045454025268555,
"blob_id": "4b43ad81462733b7cc38d685a4c6f70ceb3a7796",
"content_id": "094aac9c8a1afc05cdad96c956473c59a77306b5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1832,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 88,
"path": "/athena/examples/LCM/Singlecar/control/apps/control_debug/control_debug.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file control_debug.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef APPS_CONTROL_DEBUG_CONTROL_DEBUG_H_\n#define APPS_CONTROL_DEBUG_CONTROL_DEBUG_H_\n\n#include \"../../common/local_timer.h\"\n#include \"../control.h\"\n#include \"../../common/timer_app.h\"\n\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class ControlDebug\n * @brief 控制调试.\n */\n\ntemplate<class T>\nclass ControlDebug\n{\n typedef void (T::*PrintFun)();\n typedef void (T::*LogFun)();\n\npublic:\n void AddPrint(int print_interval,PrintFun,T *handler);\n void AddLog(int log_interval,LogFun log_fun,T *handler);\n void PrintOnTimer();\n void LogOnTimer();\n\nprivate:\n int print_interval_;\n int log_interval_;\n PrintFun print_fun_;\n LogFun log_fun_;\n T *handler_;\n};\n\n//设置调用对象及其回调函数\ntemplate<class T>\nvoid ControlDebug<T>::AddPrint(int print_interval,PrintFun print_fun,T *handler)\n{\n print_interval_ = print_interval;\n print_fun_ = print_fun;\n handler_ = handler;\n\n if(print_interval != 0)\n TimerApp<ControlDebug>::AddTimer(print_interval_,&ControlDebug::PrintOnTimer,this);\n};\n\n//设置调用对象及其回调函数\ntemplate<class T>\nvoid ControlDebug<T>::AddLog(int log_interval,LogFun log_fun,T *handler)\n{\n log_fun_ = log_fun;\n log_interval_ = log_interval;\n handler_ = handler;\n\n if(log_interval != 0)\n TimerApp<ControlDebug>::AddTimer(log_interval_,&ControlDebug::LogOnTimer,this);\n};\n\n//调用回调函数\ntemplate<class T>\nvoid ControlDebug<T>::PrintOnTimer()\n{\n (handler_->*print_fun_)();\n}\n\ntemplate<class T>\nvoid ControlDebug<T>::LogOnTimer()\n{\n (handler_->*log_fun_)();\n}\n}\n}\n#endif //APPS_CONTROL_DEBUG_CONTROL_DEBUG_H_\n"
},
{
"alpha_fraction": 0.6390658020973206,
"alphanum_fraction": 0.7317763566970825,
"avg_line_length": 21.79032325744629,
"blob_id": "6cf36aa91c1c0e17a34690252c9607d27d28436e",
"content_id": "e01aa13c92ff302268abf8ee5066fc7793fc8e3b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1531,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 62,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/config2.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "SHIFT_JIS",
"text": "#ifndef _CONFIG2_H_\n#define _CONFIG2_H_\n\n//#define\tFOR_DLL\t// 20081103\n//#define\tONLY_FOR_VP_DETECTION\t// 20081103\n#include \"type.h\"\n#pragma warning( disable : 4996 )\n#pragma warning( disable : 4819 )\n//#pragma warning( disable : 1786 )\n#pragma warning( disable : 157 )\n#pragma warning( disable : 4311 )\n#pragma warning( disable : 4312 )\n\n#define\tAW_DEBUG\n#define\tINPUT_IMAGE_WIDTH\t1280\t\t// 入力画像の幅[pixel]\n#define\tINPUT_IMAGE_HEIGHT\t720\t\t// 入力画像の高さ[pixel]\n#define\tROAD_IMAGE_WIDTH\t480\t\t// 入力画像の幅[pixel]\n#define\tROAD_IMAGE_HEIGHT\t960\t\t// 入力画像の高さ[pixel]\n\n#ifndef\tM_PI\n#define\tM_PI\t(3.141592)\n#endif\n\n#define BUFSIZE 1024\n\n#define\tFALSE\t0\n#define\tTRUE\t1\n\n//#define CHECK_MEMORY_LEAK\n\n//#define\tCALC_PEAK_TH_BY_MAX_PEAK\n#define\tERASE_CROSSING_PEAKS\n\n// 平行線とみなすための両側車線境界の曲率の差\n//#define\tCB_DIFF_OF_BOTH_SIDE_CURVATURE\t10\n//#define\tCB_DIFF_OF_BOTH_SIDE_CURVATURE\t10\t// 20050214\n#define\tCB_DIFF_OF_BOTH_SIDE_CURVATURE\t5\t// 20050215\n\n\n#define\tNO_EGOMOTION\t// 20050509\n\n#define\tSEARCH_LOCAL_MAXIMAM\t// 20050509\n//#define\tUSE_SOBEL\t// 20050509\n\n#define\tREGION_FOR_CURB_DETECTION\t1\n\n\n//#define\tVERTICAL_OFFSET_IN_IMAGE\t100\n\n#define INITIAL_FLAG_OF_OUTPUTAVI\tFALSE\n#define\tAPP_TITLE\t\"LaneDetector\"\t// アプリケーションタイトル\n\n\n#define\tDRAW_RESULT_ALWAYS\t// 20090709\n//#define\tDRAW_TO_UNDETECTED_AREA\n#define\tDISTANCE_OF_UNDECTED_AREA_TO_DRAW\t70000\n\n#define\tUSE_PNR_DIR 1\n//#define\tWITH_FCW_TAG\n\n#define\tDRAW_DISPARITY\n#endif _CONFIG2_H_?\n"
},
{
"alpha_fraction": 0.47911831736564636,
"alphanum_fraction": 0.48723897337913513,
"avg_line_length": 22.283782958984375,
"blob_id": "96af878e3ec778a0681cdff8f99eaa86326b90a7",
"content_id": "5ab0cb91384ff2798366531ec2877915b9d1baca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2114,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 74,
"path": "/athena/core/x86/Planning/include/common/navi_point.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 导航点属性,\n */\n\n#pragma once\n\n//#include <vector>\n#include <stdint.h>\n\n/**\n * @class navi_point\n * @brief 规划导航点,\n */\nclass navi_point\n{\npublic:\n double position_x; ///<x坐标\n double position_y; ///<y坐标\n double position_z; ///<z坐标\n\n double speed; ///<速度\n double lateral_speed; ///<速度横向分量\n double longitudinal_speed; ///<速度纵向分量\n double down_speed; ///<速度垂直方向分量\n\n double roll; ///<横滚角\n double pitch; ///<俯仰角\n double heading; ///<航向角\n\n double accelerataion; ///<加速度\n double lateral_accelerate; ///<加速度横向分量\n double longitudinal_accelerate; ///<加速度纵向分量\n double down_accelerate; ///<加速度垂直方向分量\n\n double steering_angle; ///<方向盘转角\n double steering_angle_speed; ///<方向盘转角速度\n\n double speed_desired_Uxs; ///<期望速度\n double acceleration_desired_Axs;///<期望加速度\n\n int point_no; ///<点号\n double pos_gps_time; ///<GPS时间\n\n int8_t gears; ///<档位\n double s; ///<里程\n double r; ///<该点代价值\n double k_s; ///<曲率\n\n int8_t type; ///<行为点类型,20:驶入路口点;21:红绿灯停车点;22:驶出路口点\n\n double lateral_offset; ///<横向偏移量\n\npublic:\n /**\n * @brief 构造函数\n */\n navi_point();\n /**\n * @brief 析构函数\n */\n ~navi_point();\n\n //int navi_point& operator= (const navi_point& src);\n};\n\n/**\n* @brief 计算两个导航点之间的欧几里得距离,\n* @param p1 输入量:第一个点。\n* @param p2 输入量:第二个点。。\n* @return p1和p2之间的直线距离。\n*/\ndouble length_of_two_navipoint(navi_point p1, navi_point p2);\n\n"
},
{
"alpha_fraction": 0.6455292105674744,
"alphanum_fraction": 0.6496350169181824,
"avg_line_length": 26.399999618530273,
"blob_id": "fcd7eae6b4272dafc6a5a4e28b99bf39abac3849",
"content_id": "a273549700c7eb177ce39fdd1b6036f3cc30df13",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2192,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 80,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/ComplexLaneBoundary.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include \"../utils/type.h\"\n#include \"../utils/config.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerInComplexLaneBoundary.h\"\n\nclass ComplexLaneBoundary\n{\n\nprivate:\n int _iMode;\n int _iLaneMarkerNumberCounter[MaximumLaneMarkerNumberInComplexLaneBoundary + 1];\n LaneMarkerInComplexLaneBoundary *_apLaneMarkerInComplexLaneBoundary[MaximumLaneMarkerNumberInComplexLaneBoundary];\n\n\npublic:\n inline ComplexLaneBoundary(void)\n {\n _iMode = CLBT_NONE;\n for(int iIdx = 0; iIdx <= MaximumLaneMarkerNumberInComplexLaneBoundary; iIdx++)\n {\n _apLaneMarkerInComplexLaneBoundary[iIdx] = NULL;\n _iLaneMarkerNumberCounter[iIdx] = 0;\n }\n }\n inline ~ComplexLaneBoundary(void)\n {\n for(int iIdx = 0; iIdx < MaximumLaneMarkerNumberInComplexLaneBoundary; iIdx++)\n {\n SAFE_DELETE(_apLaneMarkerInComplexLaneBoundary[iIdx]);\n }\n }\n inline int Mode(void)\n {\n return _iMode;\n }\n inline void Mode(int iV)\n {\n _iMode = iV;\n }\n inline int getLaneMarkerCounter(int iIdx)\n {\n return _iLaneMarkerNumberCounter[iIdx];\n }\n inline void clearLaneMarkerCounter(int iIdx)\n {\n _iLaneMarkerNumberCounter[iIdx] = 0;\n }\n inline void incLaneMarkerCounter(int iIdx)\n {\n _iLaneMarkerNumberCounter[iIdx]++;\n }\n\n inline void clearLaneMarkerCounter(void)\n {\n for(int iIdx = 0; iIdx <= MaximumLaneMarkerNumberInComplexLaneBoundary; iIdx++)\n {\n _iLaneMarkerNumberCounter[iIdx] = 0;\n }\n }\n\n inline LaneMarkerInComplexLaneBoundary *getLaneMarker(int iIdx)\n {\n if(iIdx < 0)\n return NULL;\n if(iIdx >= MaximumLaneMarkerNumberInComplexLaneBoundary)\n return NULL;\n return _apLaneMarkerInComplexLaneBoundary[iIdx];\n }\n\n inline void setLaneMarker(int iIdx, LaneMarkerInComplexLaneBoundary *p)\n {\n if(iIdx < 0)\n return ;\n if(iIdx >= MaximumLaneMarkerNumberInComplexLaneBoundary)\n return;\n SAFE_DELETE(_apLaneMarkerInComplexLaneBoundary[iIdx]);\n _apLaneMarkerInComplexLaneBoundary[iIdx] = p;\n }\n};\n"
},
{
"alpha_fraction": 0.5137332677841187,
"alphanum_fraction": 0.554163932800293,
"avg_line_length": 27.092592239379883,
"blob_id": "3a99cd3867e295ad350a2e394e4ac0903b8d2aab",
"content_id": "b7ec7e7ec588040b9175a2e40afc465fd150a376",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 5031,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 162,
"path": "/athena/examples/LCM/Singlecar/launch_truck/bin/launch.sh",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\n#构造退出文件\necho \"#!/bin/sh\" > ./dd.sh\necho \"\" >> ./dd.sh\necho \"#杀掉某一个进程\" >> ./dd.sh\necho \"killp()\" >> ./dd.sh\necho \"{\" >> ./dd.sh\necho \" PROCESS=\\`ps -ef|grep \\$1|grep -v grep|grep -v PPID|grep -v codeblocks|awk '{ print \\$2}'\\`\" >> ./dd.sh\necho \" for i in \\$PROCESS\" >> ./dd.sh\necho \" do\" >> ./dd.sh\necho ' echo \"Kill the $1 process [ $i ]\"' >> ./dd.sh\necho \" kill -9 \\$i\" >> ./dd.sh\necho \" done\" >> ./dd.sh\necho \"}\" >> ./dd.sh\necho \"\" >> ./dd.sh\n\n#构造退出+删除日志文件\necho \"#!/bin/sh\" > ./ds.sh\necho \"./dd.sh\" >> ./ds.sh\necho \"if [ -x *.osm ];then\" >> ./ds.sh\necho \" rm ./*.osm\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\necho \"if [ -x *.log ];then\" >> ./ds.sh\necho \" rm ./*.log\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\necho \"if [ -s ../log/ ];then\" >> ./ds.sh\necho \" rm ../log/*\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\necho \"if [ -x ./1/LonTestData.txt ];then\" >> ./ds.sh\necho \" rm ./1/LonTestData.txt\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\necho \"if [ -x ./2/LonTestData.txt ];then\" >> ./ds.sh\necho \" rm ./2/LonTestData.txt\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\necho \"if [ -x ./3/LonTestData.txt ];then\" >> ./ds.sh\necho \" rm ./3/LonTestData.txt\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\necho \"if [ -x ./1/lateral_control_log.txt ];then\" >> ./ds.sh\necho \" rm ./1/lateral_control_log.txt\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\necho \"if [ -x ./2/lateral_control_log.txt ];then\" >> ./ds.sh\necho \" rm ./2/lateral_control_log.txt\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\necho \"if [ -x ./3/lateral_control_log.txt ];then\" >> ./ds.sh\necho \" rm ./3/lateral_control_log.txt\" >> ./ds.sh\necho \"fi\" >> ./ds.sh\n\n#拷贝某个网络进程,参数如:\"../src/obu/obu_planning/bin/Debug/obu_planning\" \"obu_planning_DF001\"\ncopyp()\n{\n if [ -f $1 ]; then\n cp $1 ./$2\n fi\n echo 'killp \"'$2'\"' >> ./dd.sh\n chmod +x ./$2\n\n if [ $3 = \"gdb\" ]; then\n #启动gdb执行的方法\n gnome-terminal -e 'bash -c \"echo r|gdb ./'$2'; exec bash\"'\n else\n 
#正常执行方法\n gnome-terminal -e 'bash -c \"./'$2'; exec bash\"'\n fi\n \n echo \"start: \"$2\n}\n\n#拷贝某个单车进程,参数如:\"1\" \"controller\"\ncarp()\n{\n if [ ! -d ./$1 ]; then\n mkdir ./$1\n fi\n if [ -f ./$2 ]; then\n cp ./$2 ./$1/$2\n fi\n if [ -f ./controller_value.cfg ]; then\n cp ./controller_value.cfg ./$1/controller_value.cfg\n fi\n if [ -f ./engine_map.txt ]; then\n cp ./engine_map.txt ./$1/engine_map.txt\n fi\n if [ -f ./looptest.txt ]; then\n cp ./looptest.txt ./$1/looptest.txt\n fi\n echo 'killp \"'$2'\"' >> ./dd.sh\n chmod +x ./$2\n\n #修改组播地址\n sed -i \"s/OBU_URL = udpm:\\/\\/239.255.*?ttl=3/OBU_URL = udpm:\\/\\/239.255.76.2$1:762$1?ttl=3/g\" ./$1/config/control.cfg\n sed -i \"s/OBU_URL = udpm:\\/\\/239.255.*?ttl=3/OBU_URL = udpm:\\/\\/239.255.76.2$1:762$1?ttl=3/g\" ./$1/planning_value.cfg\n sed -i \"s/OBU_URL = udpm:\\/\\/239.255.*?ttl=3/OBU_URL = udpm:\\/\\/239.255.76.2$1:762$1?ttl=3/g\" ./$1/motion_planning_value.cfg\n sed -i \"s/OBU_URL = udpm:\\/\\/239.255.*?ttl=3/OBU_URL = udpm:\\/\\/239.255.76.2$1:762$1?ttl=3/g\" ./$1/sensor.cfg\n \n #切换目录\n cd ./$1\n \n #正常执行方法\n gnome-terminal -e 'bash -c \"./'$2'; exec bash\"'\n \n #启动gdb执行的方法\n #gnome-terminal -e 'bash -c \"echo r|gdb ./'$2'; exec bash\"'\n \n #切换目录\n sleep 1\n cd ..\n\n echo \"start: \"$1\"/\"$2\n}\n\n#要替换为本机IP地址的csu、rsu的name\nset_ip()\n{\n csuname=\"csu\"\n rsuname=\"rsu_1\"\n IP=`ifconfig -a|grep inet|grep -v 127.0.0.1|grep -v inet6|grep -v 10.8.0.|awk '{print $2}'|tr -d \"addr:地址\"`\n sed -i \"s/csu name=\\\"$csuname\\\" ip=\\\".*\\\" port/csu name=\\\"$csuname\\\" ip=\\\"$IP\\\" port/g\" ../conf/config.xml\n sed -i \"s/rsu name=\\\"$rsuname\\\" ip=\\\".*\\\" port/rsu name=\\\"$rsuname\\\" ip=\\\"$IP\\\" port/g\" ../conf/config.xml\n echo \"set csu&rsu ip to: \"$IP\n}\n\n#开始启动\necho \"\"\necho \"start begin\"\n\n#步骤1:修改config.xml中的csu/rsu的ip为本机ip\nset_ip\n\n#步骤2:请编辑../conf/simulate.xml,选择启动哪些模块的仿真\n\n#步骤3:选择启动哪些进程,至少启动csu_planning/rsu_planning和本车的obu_planning\ncopyp 
\"../src/obu/obu_planning/bin/Debug/obu_planning\" \"obu_planning_60U5Z\"\ncopyp \"../src/sim/sim_system/bin/Debug/sim_system\" \"sim_system\"\ncopyp \"../src/sim/sim_vui/bin/Debug/sim_vui\" \"sim_vui_DF001\"\n\n\n#步骤4:选择启动单车进程,第1个参数是车号,注意不能修改启动顺序\ncarp \"1\" \"control\"\ncarp \"1\" \"sensor\"\ncarp \"1\" \"planning_view\"\ncarp \"1\" \"planning\"\n#carp \"1\" \"NXDMonitor\"\n#\n#carp \"2\" \"controller\"\n#carp \"2\" \"sensor\"\n#carp \"2\" \"motion\"\n#carp \"2\" \"NXDMonitor\"\n#\n#carp \"3\" \"controller\"\n#carp \"3\" \"sensor\"\n#carp \"3\" \"motion\"\n#carp \"3\" \"LcmMonitor\"\n\necho \"start ok!\"\n\n#步骤6:使用 ./dd.sh 杀掉所有启动的进程\nchmod +x ./dd.sh\nchmod +x ./ds.sh\necho \"提示:可以执行 ./dd.sh 杀掉所有启动的进程\"\necho \"提示:可以执行 ./ds.sh 杀掉所有启动的进程,删除所有日志文件\"\necho \"\"\n"
},
{
"alpha_fraction": 0.6027956008911133,
"alphanum_fraction": 0.6156086325645447,
"avg_line_length": 23.884057998657227,
"blob_id": "de11c32487fd279ae29ee02417566e8b5e0e2b42",
"content_id": "5ac131176cfa5577e1e9ab1094402683092a3395",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1735,
"license_type": "no_license",
"max_line_length": 138,
"num_lines": 69,
"path": "/athena/examples/LCM/Singlecar/control/common/get_time.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"get_time.h\"\n#include <iostream>\n\nusing namespace std;\n\nnamespace athena{\nnamespace control{\n\n\n GetTime::TimeFormat GetTime::gps_local_time_;\n\n void GetTime::GetSystemTime(int &year,int &month,int &day,int &hour,int &minute,int &second,int &millisecond)\n {\n struct timeval tv;\n struct timezone tz;\n struct tm *t;\n //将tm时间转换为秒时间\n\n gettimeofday(&tv, &tz);\n t = localtime(&tv.tv_sec);\n\n year = 1900+t->tm_year;\n month = 1+t->tm_mon;\n day = t->tm_mday;\n hour = t->tm_hour;\n minute = t->tm_min;\n second = t->tm_sec;\n millisecond = tv.tv_usec/1000;\n }\n\n void GetTime::GetGpsCurrentTime(int &year,int &month,int &day,int &hour,int &minute,int &second,int &millisecond)\n {\n year = gps_local_time_.year;\n month = gps_local_time_.month;\n day = gps_local_time_.day;\n hour = gps_local_time_.hour;\n minute = gps_local_time_.minute;\n second = gps_local_time_.second;\n millisecond = gps_local_time_.millisecond;\n }\n\n void GetTime::SetGpsCurrentUtcTime(int utc_year,int utc_month,int utc_day,int utc_hour,int utc_minute,int utc_second,int utc_millisecond)\n {\n struct tm t;\n struct tm *t2;\n t.tm_year= utc_year;\n t.tm_mon = utc_month;\n t.tm_mday = utc_day;\n t.tm_hour = utc_hour;\n t.tm_min = utc_minute;\n t.tm_sec = utc_second;\n\n time_t t_;\n\n t_ = mktime(&t);\n t_-=8*3600;\n\n t2 = localtime(&t_);\n\n gps_local_time_.year = t2 -> tm_year;\n gps_local_time_.month = t2 -> tm_mon;\n gps_local_time_.day = t2 -> tm_mday;\n gps_local_time_.hour = t2 -> tm_hour;\n gps_local_time_.minute = t2 -> tm_min;\n gps_local_time_.second = t2 -> tm_sec;\n gps_local_time_.millisecond = utc_millisecond;\n }\n}\n}\n"
},
{
"alpha_fraction": 0.5631768703460693,
"alphanum_fraction": 0.5709987878799438,
"avg_line_length": 18.785715103149414,
"blob_id": "d7b688653b3bec456f42e1d5e4bca13a8b0112c4",
"content_id": "0822aef63de9b4aede588dde2bb3e0497e77db95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3324,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 168,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/flexarray.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _FLEX_ARRAY_H_\n#define _FLEX_ARRAY_H_\n\n#include <stdlib.h>\n\ntemplate<class Type>\nclass FlexArray\n{\nprivate:\n int _iMaxNumber;\n int _iNumber;\n Type *_pData;\n\npublic:\n FlexArray(void);\n FlexArray(int s);\n ~FlexArray(void);\n void reset(void);\n void clear(void);\n int getNumber(void);\n void add(Type Data);\n Type get(int idx);\n Type set(int idx, Type Data);\n inline Type *data(void)\n {\n return _pData;\n }\n void remove(int idx);\n void remove_delete(int idx);\n void insert(int idx, Type Data);\n void swap(int idx0, int idx1);\n};\n\ntemplate<class Type>\nvoid FlexArray<Type>::add(Type NewData)\n{\n if(_iNumber >= _iMaxNumber)\n {\n _iMaxNumber *= 2;\n Type *new_p = new Type[_iMaxNumber];\n if(new_p == NULL)\treturn;\n for(int iIdx = 0; iIdx < _iNumber; iIdx++)\n {\n new_p[iIdx] = _pData[iIdx];\n }\n delete [] _pData;\n _pData = new_p;\n }\n _pData[_iNumber] = NewData;\n _iNumber++;\n}\n\ntemplate<class Type>\nFlexArray<Type>::FlexArray(void)\n{\n _iNumber = 0;\n _iMaxNumber = 1;\n _pData = new Type[_iMaxNumber];\n}\n\ntemplate<class Type>\nFlexArray<Type>::~FlexArray(void)\n{\n if(_pData) delete [] _pData;\n _pData = NULL;\n}\n\ntemplate<class Type>\nvoid FlexArray<Type>::reset(void)\n{\n _iNumber = 0;\n}\n\ntemplate<class Type>\nvoid FlexArray<Type>::clear(void)\n{\n for(int iIdx = 0; iIdx < _iNumber; iIdx++)\n {\n if(_pData[iIdx] != NULL)\n {\n delete _pData[iIdx];\n }\n }\n}\n\ntemplate<class Type>\nType FlexArray<Type>::get(int iIdx)\n{\n if(iIdx < 0)\treturn NULL;\n if(iIdx >= getNumber())\treturn NULL;\n return _pData[iIdx];\n}\n\ntemplate<class Type>\nType FlexArray<Type>::set(int iIdx, Type Data)\n{\n return _pData[iIdx] = Data;\n}\n\ntemplate<class Type> int FlexArray<Type>::getNumber(void)\n{\n return _iNumber;\n}\n\ntemplate<class Type>\nvoid FlexArray<Type>::remove(int iDeleteIdx)\n{\n if(iDeleteIdx < 0)\treturn;\n if(iDeleteIdx >= _iNumber)\treturn;\n for(int iIdx = iDeleteIdx; iIdx < (_iNumber - 1); iIdx++)\n {\n 
_pData[iIdx] = _pData[iIdx + 1];\n }\n _iNumber--;\n}\n\ntemplate<class Type>\nvoid FlexArray<Type>::remove_delete(int iDeleteIdx)\n{\n if(iDeleteIdx < 0)\treturn;\n if(iDeleteIdx >= _iNumber)\treturn;\n delete _pData[iDeleteIdx];\n for(int iIdx = iDeleteIdx; iIdx < (_iNumber - 1); iIdx++)\n {\n _pData[iIdx] = _pData[iIdx + 1];\n }\n _iNumber--;\n}\n\ntemplate<class Type>\nvoid FlexArray<Type>::insert(int iInsertIdx, Type NewData)\n{\n if(iInsertIdx < 0)\treturn;\n if(iInsertIdx >= _iNumber)\n {\n add(NewData);\n return;\n }\n if(_iNumber >= _iMaxNumber)\n {\n _iMaxNumber *= 2;\n Type *new_p = new Type[_iMaxNumber];\n if(new_p == NULL)\treturn;\n for(int iIdx = 0; iIdx < _iNumber; iIdx++)\n {\n new_p[iIdx] = _pData[iIdx];\n }\n delete [] _pData;\n _pData = new_p;\n }\n for(int iIdx = (_iNumber - 1); iIdx >= iInsertIdx; iIdx--)\n {\n _pData[iIdx + 1] = _pData[iIdx];\n }\n _pData[iInsertIdx] = NewData;\n _iNumber++;\n}\n\ntemplate<class Type>\nvoid FlexArray<Type>::swap(int iIdx0, int iIdx1)\n{\n Type tmp = _pData[iIdx0];\n _pData[iIdx0] = _pData[iIdx1];\n _pData[iIdx1] = tmp;\n}\n\n\n#endif\n"
},
{
"alpha_fraction": 0.5233644843101501,
"alphanum_fraction": 0.5541506409645081,
"avg_line_length": 54.121212005615234,
"blob_id": "17c08b7bb423518c1ef0e630de0df30087999b7b",
"content_id": "1a92a52c9127742628f01a2532e7428ba0578460",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4028,
"license_type": "no_license",
"max_line_length": 184,
"num_lines": 66,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/OutputInfo.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "WINDOWS-1252",
"text": "class OutputInfo {\n//#define\tLOGIC_LOG_HEADER0\t\"?s?b?`?p[deg],?c¡¯f?¨¨¡ª|[1/m]\"\n//#define\tLOGIC_LOG_HEADER1\t\",??¡±¡¯?¨¹?I?t?Z?b?g[m],??¡±¡¯?¨¹???[?p[deg],??¡±¡¯?¨¹?¡ë?¨²?¨¨¡ª|[1/m],??¡±¡¯?¨¹?¨¨¡ª|??¡ë?¡ª|[1/m^2],??¡±¡¯?¨¹¡ªL???¡ª¡ª¡ê[m],??¡ë????I?t?Z?b?g[m]\"\n//#define\tLOGIC_LOG_HEADER2\t\",¡ëE¡±¡¯?¨¹?I?t?Z?b?g[m],¡ëE¡±¡¯?¨¹???[?p[deg],¡ëE¡±¡¯?¨¹?¡ë?¨²?¨¨¡ª|[1/m],¡ëE¡±¡¯?¨¹?¨¨¡ª|??¡ë?¡ª|[1/m^2],¡ëE¡±¡¯?¨¹¡ªL???¡ª¡ª¡ê[m],¡ëE¡ë????I?t?Z?b?g[m]\"\n\nprivate:\n\tdouble _dPitch;\t// ?s?b?`?p[deg]\n\tdouble _dCv;\t// ?c¡¯f?¨¨¡ª|[1/m]\n\tdouble _dLeftOffset;\t\t\t\t// ??¡±¡¯?¨¹?I?t?Z?b?g[m]\n\tdouble _dLeftYaw;\t\t\t\t\t// ??¡±¡¯?¨¹???[?p[deg]\n\tdouble _dLeftC0;\t\t\t\t\t// ??¡±¡¯?¨¹?¡ë?¨²?¨¨¡ª|[1/m]\n\tdouble _dLeftC1;\t\t\t\t\t//??¡±¡¯?¨¹?¨¨¡ª|??¡ë?¡ª|[1/m^2]\n\tdouble _dLeftAvailableDistance;\t\t// ??¡±¡¯?¨¹¡ªL???¡ª¡ª¡ê[m]\n\tdouble _dLeftCurbOffset;\t\t\t// ??¡ë????I?t?Z?b?g[m]\n\tdouble _dRightOffset;\t\t\t\t// ¡ëE¡±¡¯?¨¹?I?t?Z?b?g[m]\n\tdouble _dRightYaw;\t\t\t\t\t// ¡ëE¡±¡¯?¨¹???[?p[deg]\n\tdouble _dRightC0;\t\t\t\t\t// ¡ëE¡±¡¯?¨¹?¡ë?¨²?¨¨¡ª|[1/m]\n\tdouble _dRightC1;\t\t\t\t\t// ¡ëE¡±¡¯?¨¹?¨¨¡ª|??¡ë?¡ª|[1/m^2]\n\tdouble _dRightAvailableDistance;\t// ¡ëE¡±¡¯?¨¹¡ªL???¡ª¡ª¡ê[m]\n\tdouble _dRightCurbOffset;\t\t\t// ¡ëE¡ë????I?t?Z?b?g[m]\npublic:\n\tinline OutputInfo(void)\t{\n\t\t_dPitch = 0.0;\t\t\t\t\t// ?s?b?`?p[deg]\n\t\t_dCv = 0.0;\t\t\t\t\t\t// ?c¡¯f?¨¨¡ª|[1/m]\n\t\t_dLeftOffset = 0.0;\t\t\t\t// ??¡±¡¯?¨¹?I?t?Z?b?g[m]\n\t\t_dLeftYaw = 0.0;\t\t\t\t// ??¡±¡¯?¨¹???[?p[deg]\n\t\t_dLeftC0 = 0.0;\t\t\t\t\t// ??¡±¡¯?¨¹?¡ë?¨²?¨¨¡ª|[1/m]\n\t\t_dLeftC1 = 0.0;\t\t\t\t\t//??¡±¡¯?¨¹?¨¨¡ª|??¡ë?¡ª|[1/m^2]\n\t\t_dLeftAvailableDistance = 0.0;\t// ??¡±¡¯?¨¹¡ªL???¡ª¡ª¡ê[m]\n\t\t_dLeftCurbOffset = 0.0;\t\t\t// ??¡ë????I?t?Z?b?g[m]\n\t\t_dRightOffset = 0.0;\t\t\t// ¡ëE¡±¡¯?¨¹?I?t?Z?b?g[m]\n\t\t_dRightYaw = 0.0;\t\t\t\t// ¡ëE¡±¡¯?¨¹???[?p[deg]\n\t\t_dRightC0 = 0.0;\t\t\t\t// ¡ëE¡±¡¯?¨¹?¡ë?¨²?¨¨¡ª|[1/m]\n\t\t_dRightC1 = 0.0;\t\t\t\t// 
¡ëE¡±¡¯?¨¹?¨¨¡ª|??¡ë?¡ª|[1/m^2]\n\t\t_dRightAvailableDistance = 0.0;\t// ¡ëE¡±¡¯?¨¹¡ªL???¡ª¡ª¡ê[m]\n\t\t_dRightCurbOffset = 0.0;\t\t// ¡ëE¡ë????I?t?Z?b?g[m]\n\t}\n\tinline double Pitch(void)\t\t\t\t\t{\treturn _dPitch;\t}\n\tinline double Cv(void)\t\t\t\t\t\t{\treturn _dCv;\t}\n\tinline double LeftOffset(void)\t\t\t\t{\treturn _dLeftOffset;\t}\n\tinline double LeftYaw(void)\t\t\t\t\t{\treturn _dLeftYaw;\t}\n\tinline double LeftC0(void)\t\t\t\t\t{\treturn _dLeftC0;\t}\n\tinline double LeftC1(void)\t\t\t\t\t{\treturn _dLeftC1;\t}\n\tinline double LeftAvailableDistance(void)\t{\treturn _dLeftAvailableDistance;\t}\n\tinline double LeftCurbOffset(void)\t\t\t{\treturn\t_dLeftCurbOffset;\t}\n\tinline double RightOffset(void)\t\t\t\t{\treturn _dRightOffset;\t}\n\tinline double RightYaw(void)\t\t\t\t{\treturn _dRightYaw;\t\t}\n\tinline double RightC0(void)\t\t\t\t\t{\treturn _dRightC0;\t\t}\n\tinline double RightC1(void)\t\t\t\t\t{\treturn _dRightC1;\t\t}\n\tinline double RightAvailableDistance(void)\t{\treturn _dRightAvailableDistance;\t}\n\tinline double RightCurbOffset(void)\t\t\t{\treturn _dRightCurbOffset;\t}\n\tinline void Pitch(double dV)\t\t\t\t\t{\t_dPitch = dV;\t}\n\tinline void Cv(double dV)\t\t\t\t\t\t{\t_dCv = dV;\t}\n\tinline void LeftOffset(double dV)\t\t\t\t{\t_dLeftOffset = dV;\t}\n\tinline void LeftYaw(double dV)\t\t\t\t\t{\t_dLeftYaw = dV;\t}\n\tinline void LeftC0(double dV)\t\t\t\t\t{\t_dLeftC0 = dV;\t}\n\tinline void LeftC1(double dV)\t\t\t\t\t{\t_dLeftC1 = dV;\t}\n\tinline void LeftAvailableDistance(double dV)\t{\t_dLeftAvailableDistance = dV;\t}\n\tinline void LeftCurbOffset(double dV)\t\t\t{\t_dLeftCurbOffset = dV;\t}\n\tinline void RightOffset(double dV)\t\t\t\t{\t_dRightOffset = dV;\t}\n\tinline void RightYaw(double dV)\t\t\t\t\t{\t_dRightYaw = dV;\t\t}\n\tinline void RightC0(double dV)\t\t\t\t\t{\t_dRightC0 = dV;\t\t}\n\tinline void RightC1(double dV)\t\t\t\t\t{\t_dRightC1 = dV;\t\t}\n\tinline void RightAvailableDistance(double 
dV)\t{\t_dRightAvailableDistance = dV;\t}\n\tinline void RightCurbOffset(double dV)\t\t\t{\t_dRightCurbOffset = dV;\t}\n};\n"
},
{
"alpha_fraction": 0.5582619905471802,
"alphanum_fraction": 0.5872284173965454,
"avg_line_length": 19.527027130126953,
"blob_id": "d304342dd7d8f9e1661819406e35d29f14dfdbae",
"content_id": "382fe0eae180888ae958b64138df39c3a19fe016",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1723,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 74,
"path": "/athena/core/x86/Control/include/chassis.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file chassis.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CHASSIS_H_\n#define CHASSIS_H_\n\n#include <iostream>\n\nusing namespace std;\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n\n/**\n * @class Chassis\n *\n * @brief 车辆底盘信息.\n */\nclass Chassis\n{\n public:\n Chassis()\n {\n car_speed_ = 0.0;\n at_status_feedback_ = 0;\n steering_angle_feedback_ = 0.0;\n steering_angle_speed_feedback_ = 0.0;\n lat_driving_mode_feedback_ = 0;\n throttle_output_feedback_ = 0.0;\n brake_value_feedback_ = 0.0;\n brake_run_time_feedback_ = 0.0;\n lon_driving_mode_feedback_ = 0;\n epb_status_feedback_ = 0;\n epb_driving_mode_feedback_ = 0;\n }\n ~Chassis() = default;\n ///车辆速度 m/s\n double car_speed_;\n ///档杆位置\n int32_t at_status_feedback_;\n ///转向角度反馈\n double steering_angle_feedback_;\n ///转向角速度反馈\n double steering_angle_speed_feedback_;\n ///转向工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t lat_driving_mode_feedback_;\n ///节气门输出反馈\n double throttle_output_feedback_;\n ///刹车值反馈\n double brake_value_feedback_;\n ///刹车执行时间反馈\n double brake_run_time_feedback_;\n ///纵向控制工作模式反馈 1 = 自动驾驶 0 = 非自动驾驶\n int32_t lon_driving_mode_feedback_;\n ///EPB状态反馈\n int32_t epb_status_feedback_;\n ///EPB控制工作模式反馈 1 = 自动驾驶 0 = 非自动驾驶\n int32_t epb_driving_mode_feedback_;\n};\n}\n}\n\n#endif // CHASSIS_H_\n"
},
{
"alpha_fraction": 0.6895810961723328,
"alphanum_fraction": 0.6895810961723328,
"avg_line_length": 24.16216278076172,
"blob_id": "83424c9d7905aee99745f6790ccabc5773caa31e",
"content_id": "e3004fba2c08cb5f3d29d284d94ecfae3e27687c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 931,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 37,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerLines.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include \"../utils/type.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerLine.h\"\n\nclass LaneMarkerLines\n{\nprivate:\n FlexArray<ptrLaneMarkerLine> *_faLaneMarkerLines;\n\npublic:\n LaneMarkerLines(void);\n LaneMarkerLines(LaneMarkerLines *pSrc);\n ~LaneMarkerLines(void);\n LaneMarkerLine *getLaneMarkerLine(int idx);\n void deleteLaneMarkerLine(void);\n void deleteLaneMarkerLine(int iIdx);\n int getLaneMarkerLineNumber(void);\n void addLaneMarkerLine(int iOffset, int iYaw, int iVotes);\n void addLaneMarkerLine(LaneMarkerLine *p);\n\n inline void remove_delete(int iIdx)\n {\n _faLaneMarkerLines->remove_delete(iIdx);\n }\n inline void clear_reset(void)\n {\n _faLaneMarkerLines->clear();\n _faLaneMarkerLines->reset();\n }\n inline void reset(void)\n {\n _faLaneMarkerLines->reset();\n }\n void set(int iIdx, LaneMarkerLine *pLaneMarkerLine);\n};\n"
},
{
"alpha_fraction": 0.5887751579284668,
"alphanum_fraction": 0.5915989875793457,
"avg_line_length": 24.070796966552734,
"blob_id": "366c173d045e68f21af3a579bdeffc916f492fac",
"content_id": "956550a11235ea452327fa327b592581c25255d0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2987,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 113,
"path": "/athena/core/arm/Planning/include/trajectory/trajectory.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <math.h>\n#include <iostream>\n\n//#include \"common/ecu.h\"\n#include \"common/navi_point.h\"\n#include \"collision_check/collision_check.h\"\n\nusing namespace std;\n\nclass trajectory_sets;\nclass trajectory_cubic_sets;\n\nbool lane_collision_check(\n path& lane,\n RoadSurface& road,\n int st_pos, int en_pos,\n double check_length,\n int& free_length_num,\n double& free_length,\n double& obj_speed,\n double& obj_heading);\n\nbool lane_collision_check_moving_objects(\n path& lane,\n RoadSurface& road,\n int st_pos, int en_pos,\n double check_length,\n int& free_length_num,\n double& free_length,\n double& obj_speed,\n double& obj_heading);\n\nclass trajectory\n{\npublic:\n int tr_num;\n // collision_check_value, 0 is minimal\n double collision_check_value;\n double collision_time;\n double collision_obj_speed;\n double collision_distance;\n double first_collision_obj_num;\n double left_offset_value;\n double right_offset_value;\n\n double free_length;\n int free_length_num;\n int out_side_num;\n\n //目标点的代价值\n double end_pos_value;\n\n double ks_sum_value;\n double ls_sum_value;\n double steering_angle_sum_value;\n\n double consistency_value;\n double expected_lane_value;\n\n double select_value;\n\n //参考位置的开始和停止的值\n int start_pos;\n int end_pos;\n\n //参考里程的开始和停止的值\n double start_s;\n double end_s;\n\npublic:\n void init();\n\n trajectory();\n ~trajectory();\n\npublic:\n vector<navi_point> points; // 轨迹上的所有点 初步为2000个\n int points_num; // 总共的数目\n int points_num_1; // 到第一个目标点轨迹的数目\n};\n\nclass trajectory_cubic\n : public trajectory\n{\npublic:\n trajectory_cubic_sets *p_traj_cubic_sets;\n\npublic:\n void set_trajectory_sets(trajectory_cubic_sets* p_set);\n void generate_trajectory_lateral_offset_ks(double current_speed_lon, double offset);\n\n void copy_from(trajectory& tr);\n\n double traj_collision_check( const RoadSurface& road, const double car_speed ); //轨迹的碰撞检测\n float collision_check(RoadSurface& road);\n float 
collision_check_moving_objects(RoadSurface& road);\n\n float traj_ks_check();\n float ls_sum_check();\n float steering_angle_sum_check();\n\n float consistency_check(trajectory& tr);\n float consistency_check_virtual_lane(path& virtual_lane);\n\n trajectory_cubic();\n ~trajectory_cubic();\n\n //已知两点状态,生成轨迹\n int gen_trj_from_two_points( navi_point pos0, navi_point pos1, vector<navi_point>& out_points );\n\n};\n"
},
{
"alpha_fraction": 0.4646680951118469,
"alphanum_fraction": 0.4903640151023865,
"avg_line_length": 23.543859481811523,
"blob_id": "bb644def1a103b94add5d0c8417021faa16f80ef",
"content_id": "af074164a2fe61831b8f3d81aae4e16060e39d0a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1463,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 57,
"path": "/athena/cc/planning/quartic_spline_generate/main.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <fstream>\n#include <vector>\n#include \"spline/quartic_spline.h\"\n//#include \"common/navi_point.h\"\n\nusing namespace std;\n\nint main()\n{\n navi_point p0, p1;\n vector<navi_point> out_points;\n QuarticSpline quartic_spline;\n\n ///给起点赋值(坐标,方向,曲率)\n p0.position_x = 1.0;\n p0.position_y = 2.0;\n p0.heading = 45.0;\n p0.k_s = 0.06;\n\n ///给终点赋值(坐标,方向)\n p1.position_x = 20.0;\n p1.position_y = 10.0;\n p1.heading = 15.0;\n\n ///生成轨迹\n if ( quartic_spline.gen_trj_from_two_points( p0, p1, 0.1 ) )\n {\n cout << \"------- generate trajectory successfully -------\" << endl;\n quartic_spline.get_path_global( out_points );\n cout << \"size : \" << out_points.size() << endl;\n\n }\n\n /*log*/\n int out_log = 1;\n if(out_log)\n {\n ofstream outfile(\"trj_quartic_spline_log.log\", std::ios::app);\n outfile.precision(8);\n\n for(int i=0; i < out_points.size(); i++)\n {\n outfile << \" i \" << i\n << \" x \" << out_points[i].position_x\n << \" y \" << out_points[i].position_y\n << \" h \" << out_points[i].heading\n << \" k \" << out_points[i].k_s\n << endl;\n //outfile << output_virtual_path.ref_points[i].k_s << endl;\n }\n\n outfile << endl;\n outfile.close();\n }\n return 0;\n}\n\n\n"
},
{
"alpha_fraction": 0.5645879507064819,
"alphanum_fraction": 0.5913140177726746,
"avg_line_length": 17.70833396911621,
"blob_id": "a4fe8d760a336348c12051a04e630d711380a88f",
"content_id": "7c685538220dd9839096a703fc8c2ec081be6957",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1004,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 48,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/oam/log/nad_ui_log.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_ui_log.h\n * 创建者:张毅00151602\n * 时 间:2016-03-28\n * 描 述:向OCT和VUI发日志\n-------------------------------------------------------*/\n#ifndef _NAD_UI_LOG_H\n#define _NAD_UI_LOG_H\n\n#include \"nad_base.h\"\n#include \"../../msg/nad_msg.h\"\n\n\n\n#ifdef _NAD_CSU_\n\n#include \"../../../csu/csu_planning/csu_zmq.h\"\n\nextern csu_zmq *g_csu_zmq;\n\n//向oct发日志,封装了cu_log_report\nvoid log_report_to_oct(string oct_name, int32_t log_level, string log);\n\n#endif\n\n#ifdef _NAD_RSU_\n\n#include \"../../../rsu/rsu_planning/rsu_zmq.h\"\n\nextern rsu_zmq *g_rsu_zmq;\n\n//向oct发日志,封装了rc_log_report\nvoid log_report_to_oct(int32_t log_level, string log);\n\n//向vui发日志,封装了ro_log_report\nvoid log_report_to_vui(string obu_name, int32_t log_level, string log);\n\n#endif\n\n#ifdef _NAD_OBU_\n\n//向vui发日志,封装了ou_log_report\nvoid log_report_to_vui(int32_t log_level, string log);\n\n#endif\n\n\n#endif\n"
},
{
"alpha_fraction": 0.6929637789726257,
"alphanum_fraction": 0.6929637789726257,
"avg_line_length": 38.08333206176758,
"blob_id": "df4d098981ecbde9fa9c564a957f2df89909899e",
"content_id": "2f2a750202ded82551ae9a9e66d72faba65fb219",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1407,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 36,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/roadimage_window.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef\t_ROADIMAGE_WINDOW_H_\n#define\t_ROADIMAGE_WINDOW_H_\n\n////////////////////////////////////////////////////////////////////////////////\n// personal header files and macros\n#include \"config.h\"\n#include \"imrgb.h\"\n#include \"my_resource.h\"\n\n///////////////////////////////////////////////////////////////////////////////////\n// function definition\n//////////////////////////////////////////////////////////////////////////\nextern IMRGB *getRoadImage(void);\nextern void drawRoadImageOnRoadImageWindow(IMRGB *pImrgb);\n\nextern void makeRoadImage(IMRGB *a_imrgb_input);\nextern void makeRoadImage(IMRGB *a_imrgb_input, DB dPitch);\nextern void drawVerticalEdgePointOnRoadImage(void);\nextern void drawHorizontalEdgePointOnRoadImage(void);\nextern void drawVerticalEdgePeakOnRoadImage(void);\nextern void drawHorizontalEdgePeakOnRoadImage(void);\nextern void drawWhiteLinesOnRoadImage(void);\nextern void drawRoadSideObjectsOnRoadImage(void);\nextern void drawLaneMarksOnRoadImage(void);\nextern void drawLaneBoundariesOnRoadImage(void);\nextern void drawEdgeBoundariesOnRoadImage(void);\nextern void drawLaneBoundaryPairsOnRoadImage(void);\nextern void drawTopLaneBoundaryPairsOnRoadImage(void);\nstatic void drawCrossWalkLineNumber(void);\n\n\nstatic void drawDepthLine(void);\nstatic void drawVerticalLine(void);\nstatic void drawHorizontalLineDepth(void);\nstatic void drawVerticalLineOffset(void);\n#endif\t_ROADIMAGE_WINDOW_H_\n"
},
{
"alpha_fraction": 0.3787528872489929,
"alphanum_fraction": 0.40415704250335693,
"avg_line_length": 17.04166603088379,
"blob_id": "d0a39469936753a646a1fe9e7a1d4f5118c78e0a",
"content_id": "a6e646b308bf3bfd7b037abadd93f61294b16244",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 571,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 24,
"path": "/athena/core/arm/Common/include/distributed_runtime/info/nad_info.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:msg.h\n * 时 间:2016-03-02\n * 描 述:消息通信相关结构体\n-------------------------------------------------------*/\n#ifndef _MSG_H\n#define _MSG_H\n\n\n/**\n * @brief 路径规划的公共结构\n * 地图上的关键点(但地图本身并不存关键点)\n */\nclass key_point\n{\npublic:\n double lon; ///<经度\n double lat; ///<纬度\n double yaw; ///<角度\n int32_t type; ///<关键点类型,枚举值KP_XXX,忽略则取值KP_UNKNOWN=0\n};\n\n\n#endif\n"
},
{
"alpha_fraction": 0.3643364906311035,
"alphanum_fraction": 0.3649289011955261,
"avg_line_length": 24.560606002807617,
"blob_id": "b5031180e2927daf5f5dd75688d694aed08bd508",
"content_id": "1383ffc7a2a10c1f321ffaafc7f79ce2f926bea7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1688,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 66,
"path": "/athena/core/arm/Planning/include/vehicle_dynamic/cau_heading_steering.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"common/path.h\"\n#include \"spline/spline.h\"\n#include \"vehicle_dynamic/heading.h\"\n#include \"vehicle_dynamic/steering_angle.h\"\n\n//////////////////////////////////////////////////////////////////////////////////////////////////\n//\n// static\n//\n/////////////////////////////////////////////////////////////////////////////////////////////////\n\nint preprocess_path( path& p_in );\nint preprocess_path2( path& p_in );\n\nvoid cau_all_output_from_map(\n path& p_in,\n path& p_out,\n int no_points,\n double speed);\n\nvoid cau_all_output_from_single_spline(\n path& p,\n int no_points,\n double speed);\n\nvoid cau_points_heading_steering_from_spline(\n vector<navi_point>& ref_points,\n int no_points,\n double speed);\n\n//////////////////////////////////////////////////////////////////////////////////////////////////\n//\n// for motion planning\n//\n/////////////////////////////////////////////////////////////////////////////////////////////////\n\nvoid cau_heading_steering_from_spline_half(\n path& p,\n int st_p,\n int end_p,\n double speed,\n double steering);\n\n//////////////////////////////////////////////////////////////////////////////////////////////////\n//\n// for motion planning\n//\n/////////////////////////////////////////////////////////////////////////////////////////////////\n\nvoid spline_head(\n path& p,\n double speed,\n int start,\n int length);\n\nvoid spline_steering(\n path& p,\n double speed,\n int start,\n int length);\n\n//void spline_kp(\n// path& p,\n// double speed,\n// int start,\n// int length );\n\n"
},
{
"alpha_fraction": 0.6114906668663025,
"alphanum_fraction": 0.6290372610092163,
"avg_line_length": 27.75,
"blob_id": "7aa86e04dc0015ed03b5dcc382977fdc395499f0",
"content_id": "e8c3425ce9338f75849c4d99868f0132ef5f38e2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 7820,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 224,
"path": "/athena/examples/LCM/Singlecar/obu/src/obu/obu_planning/obu_session_obu.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:obu_session_obu.h\n * 创建者:李思政\n * 时 间:2016-04-03\n * 描 述:obu管理自身用到的数据结构的定义\n-------------------------------------------------------*/\n#ifndef _OBU_SESSION_OBU_H\n#define _OBU_SESSION_OBU_H\n\n#include <map>\n\n\n#include \"route.h\"\n#include \"oam/alarm/nad_ui_alarm.h\"\n#include \"oam/alarm/nad_warning.h\"\n#include \"oam/log/nad_ui_log.h\"\n#include \"MapInterface.h\"\n#include \"info/nad_speed.h\"\n\n\n//抑制5秒内的重复换道请求\n#define MAX_CL_TIMEOUT 5000\n\n//如果头指向差异超过10度,认为是折线\n#define MAX_P_YAW 10.0\n\n\n//获得下一个轨迹点\nstatic void get_next_point(int &i, int e, std::vector<route::center_point> ¢er_line)\n{\n //如果下一个点越界则退出\n i++;\n if (i > e)\n {\n return;\n }\n\n //计算计算之前的头指向\n double yaw1 = 0.0;\n get_heading(center_line[i - 1].x, center_line[i - 1].y, center_line[i].x, center_line[i].y, yaw1);\n\n //寻找一个折线点\n for (; i <= e; i++)\n {\n //关键点必须添加\n route::center_point &cp = center_line[i];\n for (int j = 1; j < MAX_KP_TYPE; j++)\n {\n if (cp.type[j] == 1)\n {\n return;\n }\n }\n\n //曲率大的退出\n double yaw2 = 0.0;\n get_heading(center_line[i].x, center_line[i].y, center_line[i + 1].x, center_line[i + 1].y, yaw2);\n if (fabs(yaw2 - yaw1) > MAX_P_YAW)\n {\n return;\n }\n }\n}\n\nclass obu_session_obu : public nad_session\n{\npublic:\n //obu的基本参数\n //string name; //obu的名称,在nad_session中定义过\n string obu_type; //obu的类型\n int session_status; //obu的主状态,参考枚举值:E_OBU_SESSION_OBU_XXX\n int start_auto_status; //obu的启动自动驾驶状态,参考枚举值:E_OBU_SESSION_OBU_START_AUTO_XXX\n int flashing_status; //转向灯状态,参考枚举值FLASHING_STATUS_XXX\n int wait_count; //用于重发计数,用于oc_rsu_name_request、or_obu_login_request、or_route_request、or_start_auto_request\n int64_t last_htbt; //用于心跳计数,在收到ro_vui_report时更新心跳时间\n\n //obu的交通信息\n double cur_lon; //当前经度\n double cur_lat; //当前纬度\n double cur_yaw; //头指向和北方向的夹角,单位:度\n double cur_brake; //刹车踏板行程,踩到底取值100.0\n double cur_speed;\n double cur_acceleration;\n double steering_angle;\n int32_t cur_gears;\n 
int32_t sensor_stat[MAX_SENSOR];\n int32_t platoon_sn; //在编队中的需要,1=头车,0=不再编队里\n double gps_time; //上报时间(GPS授时是高精度绝对时间)\n int32_t eps_stat; //方向盘\n int32_t epb_stat; //epb 1释放 2使能\n int32_t brake_stat; //刹车\n\n double sug_speed;\n int32_t right_of_way;\n //路径规划\n route::RouteBase* route; //路径规划结果\n route::RouteBase* route_draw[2];//双buffer route\n bool route_draw_flag = true;\n\n //路径规划状态\n bool route_plan_status;\n\n //速度控制\n nad_speed speed;\n\n //自动驾驶开关\n bool is_auto_drive;\n\n //obu召车、还车的状态信息\n int call_park_state; //当前状态:0 可召车,1 可还车,2 召车中,3 还车中(0,1状态不需要填写下面的信息)\n\n //换道抑制,每类型&同方向的换道5秒才可重发一次\n map<string, int64_t> cl_list;\n\n //抑制,避障换道5秒才可重发一次 :在左侧车道往左换道,在右侧车道往右换道\n map<int32_t, int64_t> cl_avoid_list;\n\n //在完整切片列表中选取当前obu前1000,后100米的所有切片\n void send_segment_section_line(nad_lcm::route_planning_m route,nad_lcm::route_planning_m &other);\n\n //给vui下发路径规划结果,也可用于主动下发路径\n void send_ou_route_respond(int ret,nad_lcm::route_planning route);\n\n //给vui下发启动自动驾驶结果,也可用于主动下发启动自动驾驶\n void send_ou_start_auto_respond(int32_t retcode, int32_t start_reason);\n\n //给vui下发退出自动驾驶结果,也可用于主动下发退出自动驾驶\n void send_ou_stop_auto_respond(int32_t retcode, int32_t stop_reason);\n\n //保存到消息\n void save_to_lcm(nad_lcm::route_planning &other);\n\n //保存到消息\n void save_to_lcm(nad_lcm::obu_info &other);\n\n //保存到消息中\n void save_to_lcm(nad_lcm::center_point &cp);\n\n //抑制5秒内的重复换道请求\n bool cl_too_busy(int32_t direction, int32_t cl_reason);\n\n //抑制5秒内的避障\n bool cl_too_busy_avoiding(int32_t cl_reason);\n\n //内部调用的换道执行操作\n //check_busy=拒绝5秒内的同类型、同方向换道请求\n //check_target=拒绝目标车道不存在的换道请求\n //check_follow=拒绝编队跟车的换道请求\n //check_forbid=拒绝\"禁止换道\"标志牌范围内的换道请求\n //check_platoon=编队头车动作时,跟车生成换道\n //check_cooperate=目标车道存在OBU车辆时,进入协作换道操作\n int change_lane_ex(int32_t direction, int32_t cl_reason, int starting_lane, int ending_lane,\n bool need_ack, bool check_busy, bool check_target, bool check_follow,\n bool check_forbid, bool check_platoon, bool check_cooperate);\n\n int 
send_change_lane_respond(nad_lcm::om_change_lane_respond &om_change_lane_respond_msg,bool need_ack, int ret, string alarm);\n\n void send_ou_vui_report();\n\n void save_route_kp_to_lcm(int16_t &num_of_key, std::vector<nad_lcm::key_point_info> &key_point,\n int16_t &cur_point_index, int16_t &num_of_point, std::vector<nad_lcm::route_line_point> &line);\npublic:\n //构造析构函数\n obu_session_obu();\n virtual ~obu_session_obu();\n\n //处理定时器\n void handle_timer(int64_t counter);\n\n //注册消息到lcm的函数\n void reg_msg_to_lcm(NE_LCM* lcm);\n\n //接收vui上传的uo_route_request\n void handle_uo_route_request(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, const nad_lcm::uo_route_request *msg);\n\n //接收vui上传的uo_start_auto_request\n void handle_uo_start_auto_request(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, const nad_lcm::uo_start_auto_request *msg);\n\n //接收vui上报的uo_stop_auto_request\n void handle_uo_stop_auto_request(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, const nad_lcm::uo_stop_auto_request *msg);\n\n //读取control_info_report\n void handle_control_info_report(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, const obu_lcm::control_info_report *msg);\n\n //motion_planner请求换道\n void handle_mo_change_lane_request(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, const nad_lcm::mo_change_lane_request *msg);\n\n void send_om_section_line_report(int replay_flag);\n\n void calc_mileage_section_acc();\n\n void route_plan_section_clear(nad_lcm::route_planning_m &route_sec);\n\n void planning_m_2_lcm(route::route_planning_m & planning_m, nad_lcm::route_planning_m &planning_m_lcm);\n\n void lcm_2_planning_m(nad_lcm::route_planning_m & planning_m_lcm, route::route_planning_m &planning_m);\n\n //设置默认速度\n void set_default_speed();\n\n //速度处理\n void handle_speed();\n};\n\n//obu_session_obu的定时器\nclass obu_session_obu_timer : public nad_timer\n{\npublic:\n //构造析构函数\n obu_session_obu_timer();\n obu_session_obu_timer(int64 interval_ms);\n 
virtual ~obu_session_obu_timer();\n\n //执行定时器\n void handle();\n};\n\n\n#endif\n"
},
{
"alpha_fraction": 0.4653802514076233,
"alphanum_fraction": 0.4888762831687927,
"avg_line_length": 26.018404006958008,
"blob_id": "1f5ff9d15788ca2031e3f1debf03d2dc1d86b53a",
"content_id": "0e2bc8cf357342a4729097dd972865333bef5582",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9348,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 326,
"path": "/athena/examples/LCM/Singlecar/control/apps/track_trajectory/cau_heading_steering.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "\n#include \"cau_heading_steering.h\"\n\n//////////////////////////////////////////////////////////////////////////////////////////////////\n//\n// only single spline cau steering angle and heading 从文件中读, 不存在里程为负值的情况。\n// 当前使用的方法\n/////////////////////////////////////////////////////////////////////////////////////////////////\n//读本地地图\nvoid cau_all_output_from_single_spline(path& p, int no_points, double speed)\n{\n if( no_points < 10 )\n {\n cout << \"ponis num not enough\" << endl;\n abort();\n }\n\n int i,k;\n\n std::vector<double> x, y, s, h, st;\n std::vector<double> xx, yy, ss, hh, stst;\n\n tk::spline s_x1, s_y1;\n\n double steering_ks;\n double heading_ks;\n double Ux_ks;\n double cs, r;\n\n double INTERPOLATION_INTERVAL = 7;\n double SPLINE_EVERY = 0.1;\n int interval = (int )(INTERPOLATION_INTERVAL/SPLINE_EVERY ); //INTERPOLATION_INTERVAL = 7; SPLINE_EVERY = 0.1\n if(no_points < 3*interval)\n {\n cout << \"ponis num not enough\" << endl;\n abort();\n }\n\n // 采集均匀的控制点\n k=0;\n /* for( i=0; i< no_points; i +=INTERPOLATION_POINT_NUM)\n {\n // if(p.ref_points[i].s > k*INTERPOLATION_INTERVAL) //0.348925 //0.308564\n {\n s.push_back(p.ref_points[i].s);\n x.push_back(p.ref_points[i].position_x);\n y.push_back(p.ref_points[i].position_y);\n k++;\n }\n }*/\n //先加入一个\n s.push_back(p.ref_points_[0].s_);\n x.push_back(p.ref_points_[0].position_x_);\n y.push_back(p.ref_points_[0].position_y_);\n k++;\n // 内部计算里程,可能出现负值的情况。\n // 70\n for(i= interval; i< no_points; i += interval )\n {\n // if(p.ref_points[i].s > k*INTERPOLATION_INTERVAL\n // && p.ref_points[i].s > s.back() + 0.5 )\n if(p.ref_points_[i].s_ > s.back() + 0.1)\n {\n s.push_back(p.ref_points_[i].s_);\n x.push_back(p.ref_points_[i].position_x_);\n y.push_back(p.ref_points_[i].position_y_);\n k++;\n }\n }\n if(s.size() < 2)\n {\n cout << \"spline points not enough\" << endl;\n abort();\n }\n\n //里程的插值方法\n s_x1.set_points(s,x);\n s_y1.set_points(s,y);\n\n p.ref_points.clear(); //?????? 
big error\n navi_point np;\n\n cs = 0;\n i = 0;\n while( cs<= s[s.size()-1])\n {\n cs = i * SPLINE_EVERY; // 0.1\n Ux_ks = CONST_SPEED; // speed;\n\n steering_ks = cau_steering_angle_from_ks(\n s_x1,\n s_y1,\n cs,\n Ux_ks,\n r,\n STEERING_CAU_ONE_POINT,\n STEERING_CAU_TWO_POINT);\n\n steering_ks = iclamp(steering_ks, MIN_STEERING_ANGLE, MAX_STEERING_ANGLE);\n\n //add by alex 20170607 new way to calc heading\n heading_ks = cau_heading_angle_from_ks(s_x1,s_y1,cs);\n\n np.s = cs;\n np.point_no = i;\n np.heading = heading_ks;\n np.steering_angle = steering_ks;\n np.position_x = s_x1(cs);\n np.position_y = s_y1(cs);\n np.k_s = 1/r;\n\n int every = MARK_EVERY;\n if( i % every == 0 ) //控制点密度和当前速度相关。speed\n np.control_mark = 1;\n else\n np.control_mark = 0;\n\n p.ref_points.push_back(np);\n\n i++;\n }\n\n int length = p.ref_points.size();\n spline_kp(p, speed, length );\n\n}\n\n//add by alex 20170615 对接收的轨迹重新插值计算\n//使用该方法的前提是传入的轨迹点距必须时0.1m\n//新轨迹需要的信息:x,y,heading,ks,s,steering_angle,gps_time,v,a,档位,其中gps_time,v,a,档位直接读取原始值\n//根据曲率变化重新插值\n#define KS_STEP 0.015\n#define S_STEP 20\nvoid cau_path_from_spline(path& p, int no_points)\n{\n if( no_points < 10 )\n {\n cout << \"ponis num not enough\" << endl;\n abort();\n }\n\n int i,k;\n\n std::vector<double> x, y, s;\n //std::vector<double> steer_angle, point_speed;\n tk::spline s_x1, s_y1;\n //tk::spline s_steer,s_speed;\n\n double steering_ks;\n double heading_ks;\n double Ux_ks;\n double cs, r;\n\n double INTERPOLATION_INTERVAL = 7;\n double SPLINE_EVERY = 0.1;\n\n int interval = (int )(INTERPOLATION_INTERVAL/SPLINE_EVERY ); //INTERPOLATION_INTERVAL = 7; SPLINE_EVERY = 0.1 每70个点取一个点\n if(no_points < 3*interval)\n {\n cout << \"ponis num not enough\" << endl;\n abort();\n }\n\n // 采集均匀的控制点\n k=0;\n //先加入一个\n s.push_back(p.ref_points[0].s);\n x.push_back(p.ref_points[0].position_x);\n y.push_back(p.ref_points[0].position_y);\n k++;\n // 内部计算里程,可能出现负值的情况。\n // 70\n for(i= interval; i< no_points; i += interval )\n {\n // 
if(p.ref_points[i].s > k*INTERPOLATION_INTERVAL\n // && p.ref_points[i].s > s.back() + 0.5 )\n if(p.ref_points[i].s > s.back() + 0.1)\n {\n s.push_back(p.ref_points[i].s);\n x.push_back(p.ref_points[i].position_x);\n y.push_back(p.ref_points[i].position_y);\n k++;\n }\n double distance = 0;\n double delta_ks=0;\n interval = 1;\n while(distance < S_STEP && (i+interval < no_points) && (delta_ks < KS_STEP))//取点改为每隔3m取一个点\n {\n distance = length_two_points(p.ref_points[i+interval].position_x, p.ref_points[i+interval].position_y,\n p.ref_points[i].position_x, p.ref_points[i].position_y);\n delta_ks = fabs(p.ref_points[i].k_s - p.ref_points[i+interval].k_s);\n interval++;\n }\n }\n if(s.size() < 2)\n {\n cout << \"spline points not enough\" << endl;\n abort();\n }\n\n //里程的插值方法\n s_x1.set_points(s,x);\n s_y1.set_points(s,y);\n\n path path_bak;\n path_bak.ref_points.clear();\n path_bak.reset_path(p.ref_points);//copy some msg to path_bak\n\n p.ref_points.clear();\n navi_point np;\n\n cs = 0;\n i = 0;\n while( cs<= s[s.size()-1])\n {\n cs = i * SPLINE_EVERY; // 0.1\n Ux_ks = CONST_SPEED; // speed;\n\n if(cs <= 5.5)\n {\n steering_ks = cau_steering_angle_from_ks(s_x1,s_y1,cs,Ux_ks,r,0,8.5);\n }\n else\n {\n steering_ks = cau_steering_angle_from_ks(\n s_x1,\n s_y1,\n cs,\n Ux_ks,\n r,\n -5.5,//-5.5 STEERING_CAU_ONE_POINT debug by alex\n 8.5);//8.5 STEERING_CAU_TWO_POINT\n }\n\n steering_ks = iclamp(steering_ks, MIN_STEERING_ANGLE, MAX_STEERING_ANGLE);\n\n //add by alex 20170607 new way to calc heading\n heading_ks = cau_heading_angle_from_ks(s_x1,s_y1,cs);\n\n np.s = cs;\n np.point_no = i;\n np.heading = heading_ks;\n\n np.steering_angle = steering_ks;\n np.position_x = s_x1(cs);\n np.position_y = s_y1(cs);\n\n //add by alex20170615\n np.pos_gps_time = path_bak.ref_points[i].pos_gps_time;\n np.acceleration_desired_Axs = path_bak.ref_points[i].acceleration_desired_Axs;\n np.speed_desired_Uxs = path_bak.ref_points[i].speed_desired_Uxs;\n np.curvature = 
path_bak.ref_points[i].curvature;\n np.p_g = path_bak.ref_points[i].p_g;\n\n np.k_s = -1.0/r;//曲率反向\n\n\n p.ref_points.push_back(np);\n\n i++;\n }\n path_bak.ref_points.clear();\n\n int length = p.ref_points.size();\n spline_kp(p, 8, length );\n\n}\n\n// 计算动态KP值(elvis最新修改,修改前会造成内存错误)\nvoid spline_kp(path& p, double speed, int length )\n{\n int i;\n std::vector<double> ss, skp; // 构造二维矩阵:X,Y\n double cs;\n double cau_kp;\n ss.clear();\n skp.clear();\n\n // 550 -> 12\n // 15 -> 2\n // 550 -15 = 545/10 = 54.5\n\n // 200 / 54.5 = 4+2 = 6\n // 400 / 54.5 = 8+2 = 10\n double max_steering = 0;\n if((length < MAP_POINT_LIM_MIN) || (length > MAP_POINT_LIM_MAX)) return; //地图检验\n for(i=0; i< length; i ++)\n {\n max_steering = fabs(p.ref_points[i].steering_angle);\n cau_kp = (max_steering)/KP_SLOPE + KP_VALUE;\n\n if(cau_kp > 10.0)\n cau_kp = 10;\n\n p.ref_points[i].suggest_kp = cau_kp;\n }\n\n int every = MARK_EVERY;//70\n\n for(i=0; i< length; i += every)\n {\n if (i==0)\n {\n ss.push_back(p.ref_points[i].s); // 将曲率传入到矩阵中\n skp.push_back(p.ref_points[i].suggest_kp);// 将推荐KP传入到矩阵中\n }\n else\n {\n if(p.ref_points[i].s > ss.back() + 0.5)\n {\n ss.push_back(p.ref_points[i].s);\n skp.push_back(p.ref_points[i].suggest_kp);\n }\n }\n }\n // 新建一个拟合器\n tk::spline skp_st;\n // 拟合器更新拟合点\n skp_st.set_points(ss, skp);// tips\n\n for(i=0; i< length; i++)\n {\n cs = p.ref_points[i].s;\n cau_kp = skp_st(cs);\n p.ref_points[i].suggest_kp = cau_kp;\n }\n}\n\n"
},
{
"alpha_fraction": 0.6067746877670288,
"alphanum_fraction": 0.6244477033615112,
"avg_line_length": 21.633333206176758,
"blob_id": "bac5253d03057b6fc7133c00ce5203abbce3c4ad",
"content_id": "fba8961663fdaa6acc391c1d8c0e0396c9658903",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 679,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 30,
"path": "/athena/core/x86/Camera/vision_ssd_detect/util/BoundingBox.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef CPP_BOUNDINGBOX_H\n#define CPP_BOUNDINGBOX_H\n\n\n#include <ostream>\n\nclass BoundingBox\n {\n // BoundingBox(double cx, double cy, double width, double height);\npublic:\n\n double cx;\n\n double cy;\n\n double width;\n\n double height;\n\n // (x1,y1) - top left\n // (x2,y2) - bottom right\n inline double x1() const { return cx - width / 2; }\n inline double y1() const { return cy - height / 2; }\n inline double x2() const { return cx + width / 2; }\n inline double y2() const { return cy + height / 2; }\n inline double area() const { return width * height; }\n inline double ratio() const { return width / height; }\n};\n\n#endif //CPP_BOUNDINGBOX_H\n"
},
{
"alpha_fraction": 0.6847192049026489,
"alphanum_fraction": 0.7008832097053528,
"avg_line_length": 26.91162872314453,
"blob_id": "71523c5909ff31a76ecef2c83b81830a0421fe28",
"content_id": "987b20bbf02557af5f152965071589c5540e3192",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6129,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 215,
"path": "/athena/core/arm/Control/include/lqr_controller/lqr_lat_controller.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file lqr_lat_controller.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROLLER_LQR_LAT_LATCONTROLLER_H_\n#define CONTROLLER_LQR_LAT_LATCONTROLLER_H_\n\n#include \"../generic_controller.h\"\n#include \"../common/eigen3/Eigen/Core\"\n#include \"../common/filters/digital_filter.h\"\n#include \"../common/filters/digital_filter_coefficients.h\"\n#include \"../common/filters/mean_filter.h\"\n#include \"../common/interpolation_1d.h\"\n#include \"../common/interpolation_2d.h\"\n#include \"simple_lateral_debug.h\"\n#include \"../scheduler.h\"\n\n\n#include <memory>\nusing namespace std;\n\nusing namespace apollo::common;\nusing namespace apollo::control;\nusing Matrix = Eigen::MatrixXd;\n\n/**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n/**\n * @class LQRLatController\n *\n * @brief LQRLatController.\n */\nclass LQRLatController:public GenericController{\n public:\n /**\n * @brief constructor\n */\n LQRLatController() = default;\n\n /**\n * @brief destructor\n */\n ~LQRLatController() = default;\n\n /**\n * @brief init.\n * @param[in] controller_config controller config.\n * @return true or false.\n */\n bool Init(const ControllerConfig controller_config);\n\n /**\n * @brief ComputeControlOutput.\n * @param[in] path 轨迹.\n * @param[in] match_point_no 轨迹匹配索引.\n * @param[in] localiation 定位信息.\n * @param[in] chassis 车辆底盘信息.\n * @param[in] ControllerOutput 控制器输出.\n * @return true or false.\n */\n bool ComputeControlOutput(path * path,uint32_t match_point_no,const LocalLocalization * local_localiation,const Chassis *chassis,ControllerOutput * controller_output);\n\n /**\n * @brief 获取调试信息.\n * @param[in] debug_output 调试输出.\n * @return void.\n */\n void GetControllerDebugInfo(DebugOutput &debug_output);\n\n /**\n * @brief SetTarSpeedDebug 设置推荐速度.\n * @param[in] tar_speed 推荐速度.\n * @param[in] valid 是否有效.\n * @return void.\n 
*/\n void SetTarSpeedDebug(int32_t tar_speed,bool valid);\n\n protected:\n ControllerConfig controller_config_;\n //the following parameters are vehicle physics related.\n //control time interval\n double ts_ = 0.01;\n // corner stiffness; front\n double cf_ = 0.0;\n // corner stiffness; rear\n double cr_ = 0.0;\n // distance between front and rear wheel center\n double wheelbase_ = 0.0;\n // mass of the vehicle\n double mass_ = 0.0;\n //distance from front wheel center to COM\n double lf_ = 0.0;\n //distance from rear wheel center to COM\n double lr_ = 0.0;\n //rotational inertia\n double iz_ = 0.0;\n // the ratio between the turn of the steering wheel and the turn of the wheels\n double steer_ratio_ = 0.0;\n // the maximum turn of steer\n double steer_single_direction_max_degree_ = 0.0;\n\n // limit steering to maximum theoretical lateral acceleration\n double max_lat_acc_ = 0.0;\n\n // number of control cycles look ahead (preview controller)\n int preview_window_ = 0;\n // number of states without previews, includes\n // lateral error, lateral error rate, heading error, heading error rate\n const int basic_state_size_ = 4;\n // vehicle state matrix\n Eigen::MatrixXd matrix_a_;\n // vehicle state matrix (discrete-time)\n Eigen::MatrixXd matrix_ad_;\n // vehicle state matrix compound; related to preview\n Eigen::MatrixXd matrix_adc_;\n // control matrix\n Eigen::MatrixXd matrix_b_;\n // control matrix (discrete-time)\n Eigen::MatrixXd matrix_bd_;\n // control matrix compound\n Eigen::MatrixXd matrix_bdc_;\n // gain matrix\n Eigen::MatrixXd matrix_k_;\n // control authority weighting matrix\n Eigen::MatrixXd matrix_r_;\n // state weighting matrix\n Eigen::MatrixXd matrix_q_;\n // updated state weighting matrix\n Eigen::MatrixXd matrix_q_updated_;\n // vehicle state matrix coefficients\n Eigen::MatrixXd matrix_a_coeff_;\n // 4 by 1 matrix; state matrix\n Eigen::MatrixXd matrix_state_;\n\n // parameters for lqr solver; number of iterations\n int lqr_max_iteration_ = 
0;\n // parameters for lqr solver; threshold for computation\n double lqr_eps_ = 0.0;\n\n DigitalFilter digital_filter_;\n\n std::unique_ptr<Interpolation1D> lat_err_interpolation_;\n\n std::unique_ptr<Interpolation1D> heading_err_interpolation_;\n\n // MeanFilter heading_rate_filter_;\n MeanFilter lateral_error_filter_;\n MeanFilter heading_error_filter_;\n\n const std::string name_;\n\n double pre_steer_angle_ = 0.0;\n\n double minimum_speed_protection_ = 0.1;\n\n double current_trajectory_timestamp_ = -1.0;\n\n double init_vehicle_x_ = 0.0;\n\n double init_vehicle_y_ = 0.0;\n\n double init_vehicle_heading_ = 0.0;\n\n double min_turn_radius_ = 0.0;\n\n int q_param_size_ = 6;\n\n //double matrix_q[6] = {0.05,0.0,1.0,0.0,0.0,0.0};\n std::vector<double> matrix_q_init_;\n int32_t cutoff_freq_ = 10;\n int32_t mean_filter_window_size_ = 10;\n int32_t lat_err_scheduler_size_ = 5;\n std::vector<Scheduler> lat_err_scheduler_init_;\n int32_t heading_err_scheduler_size_ = 5;\n std::vector<Scheduler> heading_err_scheduler_init_;\n private:\n\n /**\n * @brief InitializeFilters 初始化滤波器.\n * @return void.\n */\n void InitializeFilters();\n\n /**\n * @brief LoadLatGainScheduler 导入调节器.\n * @return void.\n */\n void LoadLatGainScheduler() ;\n\n /**\n * @brief LoadLQR控制配置文件\n * @return void.\n */\n bool LoadLQRControlConf();\n void UpdateMatrixCompound();\n double CalculateErrorSignAndVaule(double x, double y, double yaw,double x_c, double y_c, double yaw_c);\n void CalculateDertaFiByTable(double current_heading, double head_c, double *p_derta_fi, double *p_derta_fi_radian);\n void UpdateMatrix(const LocalLocalization * local_localiation);\n double ComputeFeedForward(const LocalLocalization * local_localiation,double ref_curvature) const;\n void UpdateState(path * path,uint32_t match_point_no,const LocalLocalization * local_localiation,SimpleLateralDebug *debug);\n void ComputeLateralErrors(path * path,uint32_t match_point_no,const double x, const double y,const double theta,const 
double linear_v,const double angular_v,SimpleLateralDebug *debug);\n};\n}\n}\n\n#endif\n"
},
{
"alpha_fraction": 0.5673515796661377,
"alphanum_fraction": 0.5764840245246887,
"avg_line_length": 18.46666717529297,
"blob_id": "11eadcf313ead4c814363e2f838a0701c4875921",
"content_id": "d5ec3f08477f63e7941d03f5755b2864525991c2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1064,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 45,
"path": "/athena/examples/LCM/Singlecar/obu/src/obu/obu_planning/obu_planning.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:obu_planning.h\n * 创建者:李思政\n * 时 间:2016-04-03\n * 描 述:obu主进程,用于处理相关网元注册登录,交通信息,路径规划信息\n-------------------------------------------------------*/\n#ifndef _OBU_PLANNING_H\n#define _OBU_PLANNING_H\n\n//头文件\n//#include \"obu_session_oct.h\"\n#include \"obu_session_obu.h\"\n#include \"starter/nad_starter.h\"\n\nvoid* ThreadFunction_map(void* param);\n//进程启动器\nclass obu_planning_starter : public nad_starter\n{\npublic:\n obu_session_obu obu; //管理车辆自身,包含了地图等信息\n //obu_session_oct_manager oct_manager; //管理oct\n\npublic:\n pthread_t ptht_draw;\n\npublic:\n //构造析构函数\n obu_planning_starter();\n virtual ~obu_planning_starter();\n\n //自定义启动退出函数\n int user_start();\n void user_stop();\n\n //同步时间\n void set_timer();\n};\n\n//进程公共全局变量\nextern obu_planning_starter *g_obu_planning;\n\n//lcm\n//extern NE_LCM *g_lcm;\n\n#endif\n"
},
{
"alpha_fraction": 0.6248244643211365,
"alphanum_fraction": 0.6394079923629761,
"avg_line_length": 30.69862937927246,
"blob_id": "777ec29316be61b5c6286f67edac707ffa77828b",
"content_id": "36b5aea98c2c2c8bfc143b2164a01b6c00d5ca23",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9257,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 292,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneParameterOneSide.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n#include \"../utils/config.h\"\n#include \"../utils/config2.h\"\n#include \"../utils/matutil-d.h\"\n#include \"../utils/tmc_stereobmp-forMono.h\"\n#include \"LaneMarkerPoint.h\"\n#include \"LaneMarkerPoints.h\"\n\n#pragma once\n\n/////////////////////\n#define\tCS4_ONESIDE_STATUS_NUMBER\t6\n#define\tCS4_ONESIDE_OBS_NUMBER\t10\n#define\tCS4_ONESIDE_CONTROL_INPUT_NUMBER\t2\n#define\tCS4_ONESIDE_STOCHASTIC_VARIAVLE_NUMBER\t6 //3\n\nclass LaneParameterOneSide\n{\nprivate:\n static const int _iDim = CS4_ONESIDE_STATUS_NUMBER;\n BOOL _bAvailable;\n int _iK;\n int _iCounterAfterInitialization;\n double _dParameterInit[_iDim];\n double _dParameterMin[_iDim];\n double _dParameterMax[_iDim];\n//\tdouble _dParameter[_iDim];\n\n BOOL _bNotAddSystemNoise;\n\n int _s4_mobs;\n\n double _adb_X_t_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* 1];\n double _adb_X_t_tp[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* 1];\n double _adb_X_tn_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* 1];\n double _adb_P_t_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_P_t_tp[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_P_tn_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_R_t[\tCS4_ONESIDE_OBS_NUMBER\t\t\t\t\t* CS4_ONESIDE_OBS_NUMBER];\n double _adb_F_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_F_t_T[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_D_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_CONTROL_INPUT_NUMBER];\n double _adb_G_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_STOCHASTIC_VARIAVLE_NUMBER];\n double _adb_G_t_T[\tCS4_ONESIDE_STOCHASTIC_VARIAVLE_NUMBER\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_K_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_OBS_NUMBER];\n double _adb_U_t[\tCS4_ONESIDE_CONTROL_INPUT_NUMBER\t\t* 1];\n double _adb_H_t[\tCS4_ONESIDE_OBS_NUMBER\t\t\t\t\t* CS4_ONESIDE_STATUS_NUMBER];\n double 
_adb_H_t_T[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_OBS_NUMBER];\n double _adb_Q_t[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_STOCHASTIC_VARIAVLE_NUMBER];\n double _adb_h_t[\tCS4_ONESIDE_OBS_NUMBER\t\t\t\t\t* 1];\n double _adb_y_t[\tCS4_ONESIDE_OBS_NUMBER\t\t\t\t\t* 1];\n int _adb_side_t[\tCS4_ONESIDE_OBS_NUMBER\t\t\t\t\t* 1];\n double _adb_i_t[\tCS4_ONESIDE_OBS_NUMBER\t\t\t\t\t* 1];\n double _adb_P_0[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_Q_0[\tCS4_ONESIDE_STOCHASTIC_VARIAVLE_NUMBER\t* CS4_ONESIDE_STOCHASTIC_VARIAVLE_NUMBER];\n double _adb_DM_a[2 * 3];\n double _adb_DM_b[2 * 2];\n double _db_delta_t;\n\n double _adb_X_t_t_NearArea[\tCS4_ONESIDE_STATUS_NUMBER\t\t\t\t* 1];\n // psidash, psi, edash, e rhodash, rho, phi, w\n//\tdouble _adb_X_0[CS4_STATUS_NUMBER * 1];\n//\tdouble _adb_X_m[CS4_STATUS_NUMBER * 1];\n\n//\tdouble _adb_W_m[CS4_STOCHASTIC_VARIAVLE_NUMBER * 1];\n//\tdouble _adb_W_m0[CS4_STOCHASTIC_VARIAVLE_NUMBER * 1];\n//\tdouble _adb_V_m[CS4_OBS_NUMBER * 1];\n //(1)\n double _adb_H_tP_t_tp[\t\t\t\tCS4_ONESIDE_OBS_NUMBER\t\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_H_tP_t_tpH_t_T[\t\t\tCS4_ONESIDE_OBS_NUMBER\t\t* CS4_ONESIDE_OBS_NUMBER];\n double _adb_H_tP_t_tpH_t_T_R_t[\t\tCS4_ONESIDE_OBS_NUMBER\t\t* CS4_ONESIDE_OBS_NUMBER];\n double _adb_H_tP_t_tpH_t_T_R_t_1[\tCS4_ONESIDE_OBS_NUMBER\t\t* CS4_ONESIDE_OBS_NUMBER];\n double _adb_P_t_tpHt_T[\t\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* CS4_ONESIDE_OBS_NUMBER];\n //(2)\n double _adb_K_tHt[\t\t\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_K_tHtP_t_tp[\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* CS4_ONESIDE_STATUS_NUMBER];\n //(3)\n double _adb_y_t_h_t[\t\t\t\tCS4_ONESIDE_OBS_NUMBER\t\t* 1];\n double _adb_K_ty_t_h_t[\t\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* 1];\n //(4)\n double _adb_G_tQ_t[\t\t\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* CS4_ONESIDE_STOCHASTIC_VARIAVLE_NUMBER];\n double _adb_G_tQ_tG_t_T[\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* 
CS4_ONESIDE_STATUS_NUMBER];\n double _adb_F_tP_t_t[\t\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* CS4_ONESIDE_STATUS_NUMBER];\n double _adb_F_tP_t_tF_t_T[\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* CS4_ONESIDE_STATUS_NUMBER];\n //(5)\n double _adb_F_tX_t_t[\t\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* 1];\n double _adb_D_tU_t[\t\t\t\t\tCS4_ONESIDE_STATUS_NUMBER\t* 1];\n\n#ifdef\tUSE_DYNAMIC_SYSTEM_NOISE\n int _aiDynamicNoiseFlag[CS4_ONESIDE_STATUS_NUMBER];\n#endif\tUSE_DYNAMIC_SYSTEM_NOISE\n\n double _dDetectedDistance;\n double _dDeltaOffsetCurb;\n\npublic:\n inline LaneParameterOneSide(PARAM_CAM *p, int iK)\n {\n _bAvailable = FALSE;\n _iCounterAfterInitialization = 0;\n _iK = iK;\n\n setupParameterInit(p);\n setupParameterMinMax(p);\n\n _db_delta_t = 0.1;\n initialize();\n }\n\n inline ~LaneParameterOneSide()\n {\n\n }\n inline int K(void)\n {\n return _iK;\n }\n\n void setupParameterInit(PARAM_CAM *p);\n void initialize(void);\n//\tinline void initialize(void) {\n//\t\tfor(int iIdx = 0; iIdx < Dim(); iIdx++) {\n//\t\t\t_adb_X_t_t[iIdx] = _dParameterInit[iIdx];\n//\t\t}\n//\t}\n void setupParameterMinMax(PARAM_CAM *p);\n inline int Dim(void)\n {\n return _iDim;\n }\n inline BOOL Available(void)\n {\n return _bAvailable;\n }\n inline void Available(BOOL bV)\n {\n _bAvailable = bV;\n }\n inline int getCounterAfterInitialization(void)\n {\n return _iCounterAfterInitialization;\n }\n inline void clearCounterAfterInitialization(void)\n {\n _iCounterAfterInitialization = 0;\n }\n inline void incCounterAfterInitialization(void)\n {\n _iCounterAfterInitialization++;\n }\n inline double Param(int iIdx)\n {\n return _adb_X_t_t[iIdx];\n }\n inline void Param(int iIdx, double dV)\n {\n _adb_X_t_t[iIdx] = dV;\n }\n inline double ParamInit(int iIdx)\n {\n return _dParameterInit[iIdx];\n }\n inline void ParamInit(int iIdx, double dV)\n {\n _dParameterInit[iIdx] = dV;\n }\n inline double ParamMin(int iIdx)\n {\n return _dParameterMin[iIdx];\n }\n inline void ParamMin(int iIdx, double dV)\n {\n 
_dParameterMin[iIdx] = dV;\n }\n inline double ParamMax(int iIdx)\n {\n return _dParameterMax[iIdx];\n }\n inline void ParamMax(int iIdx, double dV)\n {\n _dParameterMax[iIdx] = dV;\n }\n\n inline void Param_tn_t(int iIdx, double dV)\n {\n _adb_X_tn_t[iIdx] = dV;\n }\n double LaneBoundaryPositionOnRoad(int iK, double dZ);\n double LaneBoundaryPositionOnRoad(int iK, double dZ, double *pdLaneParameter);\n double CurbPositionOnRoad(int iK, double dZ);\n double CurbPositionOnRoad(int iK, double dZ, double *pdLaneParameter);\n double LaneBoundaryPositionOnImagePixel(PARAM_CAM *p, int iSrc);\n double LaneBoundaryPositionOnImagePixelByNearAreaParameter(PARAM_CAM *pParamCam, int iISrc);\n double LaneBoundaryPositionOnImagePixel(PARAM_CAM *p, int iSrc, double dXVehicle);\n double CurbPositionOnImagePixel(PARAM_CAM *p, int iSrc);\n inline BOOL getNotAddSystemNoise(void)\n {\n return _bNotAddSystemNoise;\n }\n inline void setNotAddSystemNoise(void)\n {\n _bNotAddSystemNoise = TRUE;\n }\n inline void clearNotAddSystemNoise(void)\n {\n _bNotAddSystemNoise = FALSE;\n }\n\n//\tLaneParameterEstimator();\n//\t~LaneParameterEstimator();\n\n double *getX_t_t(void)\n {\n return _adb_X_t_t;\n }\n double *getX_t_t_NearArea(void)\n {\n return _adb_X_t_t_NearArea;\n }\n inline void saveX_t_t_NearArea(void)\n {\n dmmove(_adb_X_t_t, _adb_X_t_t_NearArea, CS4_ONESIDE_STATUS_NUMBER, 1);\n }\n\n void calc_DM_a_b(void);\n void get_U_t(void);\n void update_t(void);\n//\tvoid set_y_t(void);\n//\tvoid set_y_t(int s4_side, int s4_iIdx);\n void set_y_t(int s4_side, LaneMarkerPoint *pLMP);\n void calc_Ht(PARAM_CAM *pParamCam);\n void calc_Ft(void);\n void calc_D_t(void);\n void calc_G_t(void);\n void calc_Kt(void);\n void calc_Kt2(void);\n void calc_P_t_t(void);\n void calc_h_t(PARAM_CAM *pParamCam);\n double get_HX_t_DM(PARAM_CAM *pParamCam, S4 s4_a_isrc, S4 s4_a_side);\n void calc_X_t_t(void);\n void calc_P_tn_t(void);\n void calc_X_tn_t(void);\n void calc_P_0(void);\n void calc_Q_t(void);\n void 
calc_Q_0(void);\n void calc_R_t(void);\n\n\n int update(PARAM_CAM *pParamCam, LaneMarkerPoints *pBoundaryPoints);\n unsigned long isInvalidWithMinAndMax(void);\n\n#ifdef\tUSE_DYNAMIC_SYSTEM_NOISE\n inline int *geDynamicNoiseFlag(void)\n {\n return _aiDynamicNoiseFlag;\n }\n void DynamicNoise(void);\n inline void resetDynamicNoiseFlag(void)\n {\n for(int iIdx = 0; iIdx < CS4_ONESIDE_STATUS_NUMBER;\tiIdx++)\n {\n _aiDynamicNoiseFlag[iIdx] = TRUE;\n }\n }\n inline void clearDynamicNoiseFlag(int iIdx)\n {\n _aiDynamicNoiseFlag[iIdx] = FALSE;\n }\n inline void setDynamicNoiseFlag(int iIdx)\n {\n _aiDynamicNoiseFlag[iIdx] = TRUE;\n }\n#endif\tUSE_DYNAMIC_SYSTEM_NOISE\n\n\n inline double getDetectedDistance(void)\n {\n return _dDetectedDistance;\n }\n inline void setDetectedDistance(double dV)\n {\n _dDetectedDistance = dV;\n }\n inline double getDeltaOffsetCurb(void)\n {\n return _dDeltaOffsetCurb;\n }\n inline void setDeltaOffsetCurb(double dV)\n {\n _dDeltaOffsetCurb = dV;\n }\n\n};\n"
},
{
"alpha_fraction": 0.5629348754882812,
"alphanum_fraction": 0.5705249905586243,
"avg_line_length": 20.457014083862305,
"blob_id": "93702afd22daf5c0a761fe7abc2f3b32c26ac882",
"content_id": "5b40e7dbc65d147c24a83fab3bfbe5c09bb66b44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6505,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 221,
"path": "/athena/core/x86/Planning/include/park/park.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 导航模块的公用数据\n * 包含对点、规划模块数据等描述,地图切片、路口红绿灯、停车泊车点等信息\n */\n\n\n#ifndef _PARK_H\n#define _PARK_H\n\n#include <stdint.h>\n#include <vector>\n#include <string>\n#include \"common/navi_point.h\"\n#include \"common/path.h\"\n#include \"common/car_state.h\"\n#include \"spline/math_tools.h\"\n\nusing namespace std;\n\n/**\n* @class StopPark\n* @brief 停车点/泊车点信息\n*/\nclass StopPark\n{\npublic:\n /**\n * @brief 构造函数\n */\n StopPark();\n /**\n * @brief 析构函数\n */\n ~StopPark();\n\n /**\n * @brief 对等于号=进行重载,StopPark类的等号操作符。\n * @param src 输入量:原始输入的停车点/泊车点信息。\n * @return 被赋值的停车点/泊车点信息。\n */\n StopPark &operator=(const StopPark& src);\n\n /**\n * @brief 初始化函数。\n * @return 1成功,0失败。\n */\n int init();\n\npublic:\n double x_; ///<停车点/泊车点x坐标\n\n double y_; ///<停车点/泊车点y坐标\n\n double heading_; ///<停车点/泊车点方向\n\n double length_; ///<泊车位长度\n\n double width_; ///<泊车位宽度\n\n int32_t type_; ///<停车点/泊车点类型,一般停车点,垂直泊车点,水平泊车点\n\n// double offset_;\n};\n\n/**\n* @class IntelligentParking\n* @brief 生成泊车轨迹信息\n*/\nclass IntelligentParking : public StopPark\n{\npublic:\n /**\n * @brief 构造函数\n */\n IntelligentParking();\n /**\n * @brief 析构函数\n */\n ~IntelligentParking();\n\n /**\n * @brief 设置泊车点\n * @param stop_park 输入量:泊车点。\n * @return 1设置成功,0不成功。\n */\n int set_park_point( const StopPark& stop_park );\n\n /**\n * @brief 计算车当前停止的点(车后轴中心点)。\n * @param car_state 输入量:车辆状态。\n * @return 1计算成功,0不成功。\n */\n int compute_cur_stop_pose( const CarState& car_state );\n\n /**\n * @brief 根据停车位信息,计算倒车结束的点(车后轴中心点)。\n * @return 1计算成功,0不成功。\n */\n int compute_end_park_pose();\n\n /**\n * @brief 计算泊车轨迹。\n * @param car_state 输入量:车辆状态。\n * @return 1计算成功,0不成功。\n */\n int compute_parking_trajectory( const CarState& car_state );\n\n /**\n * @brief 计算平行泊车轨迹。\n * @return 1计算成功,0不成功。\n */\n int compute_parallel_parking_trajectory();\n\n /**\n * @brief 计算平行泊车轨迹。\n * @param ss 输入量:开始右打方向盘的横向距离\n * @param hh 输入量:开始右打方向盘的纵向距离\n * @param trajectory_d 输入量:泊车坐标系下,泊车轨迹中D档部分\n * @param trajectory_r 
输入量:泊车坐标系下,泊车轨迹中R档部分\n * @return 1计算成功,0不成功。\n */\n int compute_parallel_parking_trajectory( double ss, double hh, path& trajectory_d, path& trajectory_r );\n\n /**\n * @brief 计算垂直泊车轨迹。\n * @return 1计算成功,0不成功。\n */\n int compute_vertical_parking_trajectory();\n\n /**\n * @brief 计算垂直泊车轨迹。\n * @param trajectory_d 输入量:泊车坐标系下,泊车轨迹中D档部分\n * @param trajectory_r 输入量:泊车坐标系下,泊车轨迹中R档部分\n * @return 1计算成功,0不成功。\n */\n int compute_vertical_parking_trajectory( path& trajectory_d, path& trajectory_r );\n\n /**\n * @brief 轨迹从泊车坐标系转换到全局坐标系。\n * @return 1计算成功,0不成功。\n */\n int transfer_trajectory_p2g();\n\n /**\n * @brief 轨迹从泊车坐标系转换到全局坐标系。\n * @param p_trajectory 输入量:泊车坐标系下轨迹\n * @param g_trajectory 输入量:全局坐标系下轨迹\n * @return 1计算成功,0不成功。\n */\n int transfer_trajectory_p2g( path p_trajectory, path& g_trajectory );\n\n /**\n * @brief 计算纵向信息\n * @param p_trajectory 输入量:轨迹\n * @param gear 输入量:档位\n * @return 1计算成功,0不成功。\n */\n int compute_longitudinal_info( path& p_trajectory, int8_t gear );\n\n /**\n * @brief 轨迹向前延长一定的点个数\n * @param p_trajectory 输入量:轨迹\n * @param extend_num 输入量:延长点的个数,单位:个\n * @param interval 输入量:取点间隔,单位:米\n * @param gear 输入量:档位\n * @return 1计算成功,0不成功。\n */\n int extend_trajectory( path& p_trajectory, int extend_num, double interval, int8_t gear );\n\n /**\n * @brief 获得泊车轨迹中D档部分\n * @param park_trajectory_d 输出量:全局坐标系下,泊车轨迹中D档部分\n * @return 1计算成功,0不成功。\n */\n int get_trajectory_d( path& park_trajectory_d );\n\n /**\n * @brief 获得泊车轨迹中R档部分。\n * @param park_trajectory_r 输出量:全局坐标系下,泊车轨迹中R档部分\n * @return 1计算成功,0不成功。\n */\n int get_trajectory_r( path& park_trajectory_r );\n\n /**\n * @brief 获得泊车轨迹中D档部分点的个数。\n * @return 点的个数。\n */\n int get_size_trajectory_d();\n\n /**\n * @brief 获得泊车轨迹中R档部分点的个数。\n * @return 点的个数。\n */\n int get_size_trajectory_r();\n\n\n\nprivate:\n Transfer transfer_pg_; ///<泊车坐标系和全局坐标系之间的转换\n\n //CarState car_state_; ///<车辆状态\n navi_point pose_current_stop_; ///<车当前停止的点\n navi_point pose_end_park_; ///<倒车结束的点\n\n navi_point p_pose_current_stop_; 
///<泊车坐标系下,车当前停止的点\n navi_point p_pose_end_park_; ///<泊车坐标系下,倒车结束的点\n\n// CollisionCheck collision_check_; ///<泊车时碰撞检测\n\n path p_park_trajectory_d_; ///<泊车坐标系下,泊车轨迹中D档部分\n path p_park_trajectory_r_; ///<泊车坐标系下,泊车轨迹中R档部分\n\n path park_trajectory_d_; ///<泊车轨迹中D档部分\n path park_trajectory_r_; ///<泊车轨迹中R档部分\n\n\n};\n\n#endif //__PARK_H\n\n"
},
{
"alpha_fraction": 0.683953046798706,
"alphanum_fraction": 0.6927592754364014,
"avg_line_length": 19.646465301513672,
"blob_id": "8d69af48e2f12ad160422f0b892c41265389dd14",
"content_id": "604769f7a42e9ffb23cd5e2360aeba6b6f46ccaf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2150,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 99,
"path": "/athena/core/arm/Control/include/controller.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file controller.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef COMMON_CONTROLLER_H_\n#define COMMON_CONTROLLER_H_\n\n#include <iostream>\n#include <vector>\n#include \"chassis.h\"\n#include \"controller_agent.h\"\n#include \"controller_config.h\"\n#include \"localization.h\"\n#include \"trajectory.h\"\n#include \"controller_output.h\"\n#include \"debug_output.h\"\n\nusing namespace std;\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class controller\n * @brief controller base.\n */\nclass Controller{\n public:\n Controller() = default;\n ~Controller() = default;\n\n\n /**\n * @brief initialization.\n * @param[in] controller_config controller config.\n * @return true or false.\n */\n bool Init(const ControllerConfig controller_config);\n\n/**\n * @brief SetTrajectory.\n * @param[in] trajectory trajectory information.\n * @return void.\n */\n void SetTrajectory(const Trajectory *trajectory);\n\n/**\n * @brief ComputeControlOutput.\n * @param[in] localiation Location information.\n * @param[in] controller_output controller output.\n * @return true or false.\n */\n bool ComputeControlOutput(const Localization * localiation,const Chassis *chassis,ControllerOutput * controller_output);\n\n\n/**\n * @brief GetControllerInfo.\n * @param[in] debug_output 调试输出.\n * @return void.\n */\n void GetControllerInfo(DebugOutput &debug_output);\n\n/**\n * @brief SetDrivingModeDebug 设置驾驶模式用作调试.\n * @param[in] mode 0 无效 1 人工驾驶 3 自动驾驶.\n * @return void.\n */\n void SetDrivingModeDebug(int32_t mode);\n\n/**\n * @brief SetTarSpeedDebug 设置推荐速度.\n * @param[in] tar_speed 推荐速度.\n * @param[in] valid 是否有效.\n * @return void.\n */\n void SetTarSpeedDebug(int32_t tar_speed,bool valid);\n\n/**\n * @brief GetAlarmInfo 获取报警信息.\n * @param[out] alarm_list 报警信息.\n * @return void.\n */\n void 
GetAlarmTableInfo(std::vector<ControllerOutputAlarm::AlarmInfoTable> *alarm_list);\n private:\n ///控制器代理\n ControllerAgent controller_agent_;\n};\n}\n}\n\n#endif // COMMON_CONTROLLER_H_\n"
},
{
"alpha_fraction": 0.6573670506477356,
"alphanum_fraction": 0.6639058589935303,
"avg_line_length": 31.30281639099121,
"blob_id": "b6ad56ba2c7b5a537ef5cf9aa10a8614df3e087a",
"content_id": "ffbb6ed4115d142e96819a942a96d3e8d4434214",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4588,
"license_type": "no_license",
"max_line_length": 215,
"num_lines": 142,
"path": "/athena/examples/ROS/src/Perception/ssd_detection/src/ssd_detection.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <ros/ros.h>\n#include <image_transport/image_transport.h>\n#include <cv_bridge/cv_bridge.h>\n#include <sensor_msgs/image_encodings.h>\n#include <opencv2/imgproc/imgproc.hpp>\n#include <opencv2/highgui/highgui.hpp>\n#include <opencv2/opencv.hpp> \n\n\n#include \"ssd_detection/vision_detect_node.hpp\"\n#include \"ssd_detection/kf_tracker.hpp\"\n\n#include <ssd_detection/SSD_Objects.h>\n\nusing namespace caffe;\nusing namespace std;\n\n\nclass VisionDetector\n{\npublic:\n\tVisionDetector(const ros::NodeHandle& nh, const ros::NodeHandle& nh_private,std::string config_file);\n\nprotected:\n\tvoid onImageCallback(const sensor_msgs::Image::ConstPtr& msg);\n\nprivate:\n\n\tros::NodeHandle nh_;\n\tros::NodeHandle nh_private_;\n\tros::Publisher ssd_detection_result_;\n\n\timage_transport::ImageTransport it_;\n \timage_transport::Subscriber image_sub_;\n \timage_transport::Publisher image_pub_;\n\n\tVisiondetect caffe_detect_;\n};\n\n\nVisionDetector::VisionDetector(const ros::NodeHandle& nh, const ros::NodeHandle& nh_private,std::string config_file)\n\t:nh_(nh)\n\t,nh_private_(nh_private)\n\t,it_(nh_)\n\t,caffe_detect_(Visiondetect(config_file))\n{\n\tROS_INFO(\"ROS ssd init begin.\");\n\t//Caffe detector\n\tcaffe_detect_.read_config_value_from_file();\n\tROS_INFO(\"ROS ssd init 1 .\");\n \tcaffe_detect_.ssd_detector_ = new Detector(caffe_detect_.network_Model, caffe_detect_.pretrained_Weights,caffe_detect_.mean_file_,caffe_detect_.pixel_mean_,caffe_detect_.use_gpu, \t\tcaffe_detect_.gpu_device_id);\n\n\tROS_INFO(\"ROS ssd init 2.\");\n\t//ROS\n\timage_sub_ = it_.subscribe(\"/usb_cam/image_raw\",1,&VisionDetector::onImageCallback,this);\n\timage_pub_ = it_.advertise(\"/ssd_detection_result\", 1);\n\n\tssd_detection_result_ = nh_.advertise<ssd_detection::SSD_Objects>(\"ssd_detection_objects\",1);\n\tROS_INFO(\"ROS ssd init success.\");\n}\n\nvoid VisionDetector::onImageCallback(const sensor_msgs::Image::ConstPtr& msg)\n{\n\tcv_bridge::CvImagePtr cv_ptr;\n 
\ttry\n \t{\n \t\tcv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);\n \t}\n \tcatch (cv_bridge::Exception& e)\n \t{\n \t\tROS_ERROR(\"cv_bridge exception: %s\", e.what());\n \t\treturn;\n \t}\n\n\t//ssd detection\n\tdouble t = (double)cv::getTickCount();\n \tvector<camera_obj> get_cam_objs;\n \tcaffe_detect_.convert_rect_to_image_obj(cv_ptr->image,get_cam_objs);\n \tt = ((double)cv::getTickCount() - t) / cv::getTickFrequency();\n\n\n\t//ROS_INFO(\"object size: %d\",static_cast<int>(get_cam_objs.size()));\n\tssd_detection::SSD_Objects ssd_object_list;\n\tssd_detection::SSD_Object ssd_object;\n\tfor(vector<camera_obj>::iterator iter = get_cam_objs.begin(); iter != get_cam_objs.end(); iter++)\n\t{\n\t\tssd_object.id = iter->id;\n\t\tssd_object.stability = iter->stability;\n\t\tssd_object.classification = iter->classification; \t\n\t\tssd_object.detection_status = iter->detection_status; \n\t\tssd_object.lat_pos = iter->lat_pos; \n\t\tssd_object.lat_rate = iter->lat_rate; \n\t\tssd_object.lon_pos = iter->lon_pos;\n\t\tssd_object.lon_rate = iter->lon_rate;\n\t\tssd_object.width = iter->width; \n\t\tssd_object.score = iter->score; \n\t\t \n\t\tgeometry_msgs::Point32 pointf;\n\t\tpointf.x = iter->box_point.x;\n\t\tpointf.y = iter->box_point.y;\n\t\tpointf.z = 0.0;\n\t\tssd_object.obj_rect.pointf = pointf;\n\t\tssd_object.obj_rect.width = iter->box_point.width;\n\t\tssd_object.obj_rect.height = iter->box_point.height; \n\t\tssd_object.real_data = iter->real_data; \n\t\tssd_object.lifespan = iter->lifespan;\n\n\t\tssd_object_list.objects.push_back(ssd_object);\n\t}\n\tssd_object_list.header.frame_id = \"ssd\";\n\tssd_object_list.header.stamp = msg->header.stamp;\n\tssd_detection_result_.publish(ssd_object_list);\n\n \tfloat fps=1.0 / t+2;\n \tstring String =\"FPS:\"+ static_cast<ostringstream*>( &(ostringstream() << fps) )->str();\n \tcv::putText(cv_ptr->image,String,cvPoint(0,40),CV_FONT_HERSHEY_PLAIN,2,cv::Scalar(255, 255, 0),2);\n\n\t// Output 
modified video stream\n\timage_pub_.publish(cv_ptr->toImageMsg());\n}\n\n\nint main(int argc, char **argv)\n{\n\tros::init(argc, argv,\"ssd_detection_node\");\n\tros::NodeHandle nh;\n\tros::NodeHandle nh_private(\"~\");\n\n\tstd::string detection_config_file = \"\";\n\tnh_private.getParam(\"detection_config_file\", detection_config_file);\n\tROS_INFO(\"config file: %s\",detection_config_file.c_str());\n\tif(detection_config_file == \"\")\n\t{\n\t\tROS_ERROR(\"Please select detection config file\");\n\t\treturn -1;\n\t}\n\tVisionDetector visionDetector(nh,nh_private,detection_config_file);\n\n\tros::spin();\n\n\treturn 0;\n}\n\n"
},
{
"alpha_fraction": 0.48552337288856506,
"alphanum_fraction": 0.5011135935783386,
"avg_line_length": 11.472222328186035,
"blob_id": "a6dcfeb62456f9edaab6491152dea6be23f92162",
"content_id": "8847150b3f32656d4d3d50b38fa2e2c471280e53",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 449,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 36,
"path": "/athena/examples/LCM/Singlecar/control/common/Thread.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"Thread.h\"\n \nThread::Thread(){\n \n}\n\nThread::~Thread(){ \n}\n\n \nvoid* Thread::run0(void* opt)\n{\n Thread* p = (Thread*) opt;\n p->run1();\n return p;\n}\n\nvoid* Thread::run1()\n{\n _tid = pthread_self();\n run();\n _tid = 0;\n pthread_exit(NULL);\n}\n \nbool Thread::start()\n{\n return pthread_create(&_tid, NULL, run0, this) == 0;\n}\n\nvoid Thread::join()\n{\n if( _tid > 0 ){\n pthread_join(_tid, NULL);\n }\n }\n"
},
{
"alpha_fraction": 0.6841517686843872,
"alphanum_fraction": 0.691220223903656,
"avg_line_length": 20.33333396911621,
"blob_id": "582a140b7f81e42c4362c6d306d5a889e5b99890",
"content_id": "457eff16319fac4dd42c6a747ec9f3cfdeae976e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2966,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 126,
"path": "/athena/core/x86/Control/include/lon_controller/lon_controller.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file controller.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n/**\n * @file\n * @brief Defines the LonController class.\n */\n#ifndef CONTROLLER_LON_CONTROLLER_LONCONTROLLER_H_\n#define CONTROLLER_LON_CONTROLLER_LONCONTROLLER_H_\n\n#include \"vehicle_dynamics.h\"\n#include \"../generic_controller.h\"\n#include \"../gear_position.h\"\n#include \"../common/map_matching/map_matching.h\"\n\n\n/**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class LonController\n *\n * @brief Longitudinal controller, to compute brake and driving force values.\n */\nclass LonController:public GenericController{\n public:\n\n /**\n * @brief constructor\n */\n LonController() = default;\n\n /**\n * @brief destructor\n */\n ~LonController() = default;\n\n /**\n * @brief init.\n * @param[in] controller_config controller config.\n * @return true or false.\n */\n bool Init(const ControllerConfig controller_config);\n\n /**\n * @brief ComputeControlOutput.\n * @param[in] path 轨迹.\n * @param[in] match_point_no 轨迹匹配索引.\n * @param[in] local_localiation 地图定位信息.\n * @param[in] chassis 车辆底盘信息.\n * @param[in] ControllerOutput 控制器输出.\n * @return true or false.\n */\n bool ComputeControlOutput(path * path,uint32_t match_point_no,const LocalLocalization * local_localiation,const Chassis *chassis,ControllerOutput * controller_output);\n\n\n /**\n * @brief 获取调试信息.\n * @param[in] debug_output 调试输出.\n * @return void.\n */\n void GetControllerDebugInfo(DebugOutput &debug_output);\n\n\n/**\n * @brief SetTarSpeedDebug 设置推荐速度.\n * @param[in] tar_speed 推荐速度.\n * @param[in] valid 是否有效.\n * @return void.\n */\n void SetTarSpeedDebug(int32_t tar_speed,bool valid);\n\n private:\n ///车辆动力学\n VehicleDynamics vehicle_dynamics_;\n ///车辆动力学输入\n VehicleDynamicsInput vehicle_dynamics_input_;\n ///纵向控制车辆动力学输入\n //VehicleDynamicsInput vehicle_dynamics_input;\n ///基本配置\n 
ControllerConfig controller_config_;\n ///预描距离\n double pre_draw_distance;\n ///纵向状态\n int32_t lon_status;\n ///目标加速度值\n double tar_acceleration;\n\n ///纵向速度调试\n bool tar_speed_debug_valid_;\n double tar_speed_debug;\n\n typedef enum{\n ACCELERATE,/**< 加速*/\n HOLD ,/**< 保持*/\n BRAKE ,/**< 减速*/\n }LonStatus;\n\n/**\n * @brief AcceleratorSwitch 油门和刹车切换策略.\n * @param[in] pre_draw_end_speed 预描点处的速度.\n * @return void\n */\n void AcceleratorSwitch(double pre_draw_end_speed);\n\n /**\n * @brief ComputeEpbOutput 计算EPB控制输出.\n * @param[in] current_speed 当前速度.\n * @param[in] current_gear_pos 当前档杆位置.\n * @return true:EPB使能 false:EPB禁能\n */\n bool ComputeEpbOutput(double current_speed,int32_t current_gear_pos);\n};\n}//namespace control\n}//namespace athena\n#endif //CONTROLLER_LATERAL_LAT_CONTROLLER_H\n"
},
{
"alpha_fraction": 0.47989949584007263,
"alphanum_fraction": 0.47989949584007263,
"avg_line_length": 31.189189910888672,
"blob_id": "b758349e9e4007a974765b79f3d42d71ec61c499",
"content_id": "caadebb1cf9da6049889a580a4f6674c2a565549",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1414,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 37,
"path": "/athena/core/arm/Planning/include/common/path_tools.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n//#include \"ecu.h\"\n#include \"common/navi_point.h\"\n#include \"common/path.h\"\n\nvoid alloc_lane_size(path& lane, int length);\n\nvoid cau_self_lane_offset(path& lane, //要考过来的轨迹\n double offset); //拷贝到目标的起点\n\nvoid cau_current_lane(path& lane, //要考过来的轨迹\n path& ref_lane, //从参考的轨迹中考出\n int st_pos, int en_pos, //要拷贝过来的位置\n double offset, //需要增加的偏移量\n int start_num); //拷贝到目标的起点\n\nvoid cau_current_lane(path& lane, //要考过来的轨迹\n path& ref_lane, //从参考的轨迹中考出\n int st_pos, int en_pos, //要拷贝过来的位置\n double offset); //需要增加的偏移量\n\ndouble find_ref_offset(double x, double y, double yaw,\n path& p, int st_pos);\n\n\n\nvoid copy_path_into_virtual_lane( path& p, //要考过来的轨迹\n int start_pos, //要拷贝过来的位置\n int en_pos,\n int into_pos, //拷贝到目标的起点\n path& virtual_lane);\n\n\nvoid append_tail_to_virtual_lane(\n path& tail_path,\n path& virtual_lane);\n\n\n\n"
},
{
"alpha_fraction": 0.6340826153755188,
"alphanum_fraction": 0.6415751576423645,
"avg_line_length": 30.021621704101562,
"blob_id": "b97be04326290884fb31297453b2a86e732b1d86",
"content_id": "f1ce9d7d3d0c142d56879b33ae905ec1175e1521",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5739,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 185,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/GridMap1D.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "class GridMap1D {\nprivate:\n\tdouble _dXmin;\n\tdouble _dXmax;\n\tdouble _dXstep;\n\tLaneMarkerPoints *_pLMPs;\n\tint _iIsrcMin;\n\tint _iIsrcMax;\n\tint _iGridNumber;\n\tdouble _adInitialOffset[LR_NUM];\n\tFlexArray<double> *_pYmins;\n\tdouble\t*_pdMovingAverage;\n\tBOOL *_pbStep;\npublic:\n\tinline GridMap1D(void)\t{\n\t\t_dXmin = 0;\n\t\t_dXmax = 0;\n\t\t_dXstep = 0;\n\t\t_pLMPs = NULL;\n\t\t_iIsrcMin = 0;\n\t\t_iIsrcMax = 0;\n\t\t_iGridNumber = 0;\n\t\tfor(int iLR = LR_LEFT; iLR < LR_NUM; iLR++)\t\t{\t_adInitialOffset[iLR] = 0;\t}\n\t\t_pYmins = NULL;\n\t\t_pdMovingAverage = NULL;\n\t\t_pbStep = NULL;\n\t}\n\tinline ~GridMap1D(void)\t{\n\t\tSAFE_DELETE_ARRAY(_pLMPs);\n\t\tSAFE_DELETE_ARRAY(_pYmins);\n\t\tSAFE_DELETE_ARRAY(_pdMovingAverage);\n\t\tSAFE_DELETE_ARRAY(_pbStep);\n\t}\n\tinline double getXmin(void)\t{\treturn _dXmin;\t}\n\tinline double getXmax(void)\t{\treturn _dXmax;\t}\n\tinline double getXstep(void)\t{\treturn _dXstep;\t}\n\tinline void setXmin(double dV)\t{\t_dXmin = dV;\t}\n\tinline void setXmax(double dV)\t{\t_dXmax = dV;\t}\n\tinline void setXstep(double dV)\t{\t_dXstep = dV;\t}\n\tinline int getIsrcMin(void)\t{\treturn\t_iIsrcMin;\t}\n\tinline int getIsrcMax(void)\t{\treturn\t_iIsrcMax;\t}\n\tinline void setIsrcMin(int iV)\t{\t_iIsrcMin = iV;\t}\n\tinline void setIsrcMax(int iV)\t{\t_iIsrcMax = iV;\t}\n\tinline int getGridNumber(void)\t{\treturn _iGridNumber;\t}\n\tinline void setGridNumber(int iV)\t{\t_iGridNumber = iV;\t}\n\tinline double getInitialOffset(int iLR)\t{\treturn _adInitialOffset[iLR];\t}\n\tinline void setInitialOffset(int iLR, double dV)\t{\t_adInitialOffset[iLR] = dV;\t}\n\tinline FlexArray<double> *getYmins(int iIdx)\t{\treturn (_pYmins + iIdx);\t}\n\tinline double getMovingAverage(int iIdx)\t{\treturn *(_pdMovingAverage + iIdx);\t}\n\tinline void setMovingAverage(int iIdx, double dV)\t{\t*(_pdMovingAverage + iIdx) = dV;\t}\n\tinline BOOL getStepFlag(int iIdx)\t{\treturn *(_pbStep + iIdx);\t}\n\tinline void 
setStepFlag(int iIdx, BOOL bV)\t{\t*(_pbStep + iIdx) = bV;\t}\n\tinline LaneMarkerPoints *getGrid(int iIdx) {\n\t\tif(iIdx < 0)\treturn NULL;\n\t\tif(iIdx >= getGridNumber())\treturn NULL;\n\t\treturn (_pLMPs + iIdx);\n\t}\n\n\tinline void initlize(void) {\n\t\tSAFE_DELETE_ARRAY(_pLMPs);\n\t\tsetXmin(0);\n\t\tsetXmax(0);\n\t\tsetXstep(0);\n\t\tsetGridNumber(0);\n\t\tsetIsrcMin(0);\n\t\tsetIsrcMax(0);\n\n\t\tSAFE_DELETE_ARRAY(_pYmins);\n\t\tfor(int iLR = LR_LEFT; iLR < LR_NUM; iLR++)\t\t{\t_adInitialOffset[iLR] = 0;\t}\n\t\t_pLMPs = NULL;\n\t\tSAFE_DELETE_ARRAY(_pdMovingAverage);\n\t\tSAFE_DELETE_ARRAY(_pbStep);\n\t}\n\tinline void setup(double dXmin, double dXmax, double dXstep) {\n\t\tSAFE_DELETE_ARRAY(_pLMPs);\n\t\tif(dXmin >= dXmax)\treturn;\n\t\tif(dXstep <= 0)\treturn;\n\t\tsetXmin(dXmin);\n\t\tsetXmax(dXmax);\n\t\tsetXstep(dXstep);\n\t\tint iNumber = (int)((getXmax() - getXmin()) / getXstep() + 0.5);\n\t\tsetGridNumber(iNumber);\n\t\t_pLMPs = new LaneMarkerPoints[iNumber];\n\n\t\tSAFE_DELETE_ARRAY(_pYmins);\n\t\t_pYmins = new FlexArray<double>[iNumber];\n\t\tfor(int iLR = LR_LEFT; iLR < LR_NUM; iLR++)\t\t{\t_adInitialOffset[iLR] = 0;\t}\n\t\tSAFE_DELETE_ARRAY(_pdMovingAverage);\n\t\t_pdMovingAverage = new double[iNumber];\n\t\t_pbStep = new BOOL[iNumber];\n\t\tfor(int iIdx = 0; iIdx < iNumber; iIdx++) {\t_pbStep[iIdx] = FALSE;\t}\n\t}\n\tinline int getGridIdx(double dX) {\n\t\tif(dX < getXmin())\treturn -1;\n\t\tif(dX > getXmax())\treturn -1;\n\t\tint iIdx = (int)((dX - getXmin()) / getXstep());\n\t\treturn iIdx;\n\t}\n\tinline LaneMarkerPoints *getGrid(double dX) {\n\t\tif(dX < getXmin())\treturn NULL;\n\t\tif(dX > getXmax())\treturn NULL;\n\t\tint iIdx = (int)((dX - getXmin()) / getXstep());\n\t\treturn (_pLMPs + iIdx);\n\t}\n\tinline void vote(LaneMarkerPoint *pLMP, double dOffset) {\n\t\tif(pLMP == NULL)\treturn;\n\t\tdouble dX = pLMP->X3D();\n\t\tdX += dOffset;\n\t\tLaneMarkerPoints *pLMPs =getGrid(dX);\n\t\tif(pLMPs == 
NULL)\treturn;\n\t\tpLMPs->addLaneMarkerPoint(pLMP);\n\t}\n\tinline BOOL calcMovingAverage(void)\t{\n\t\tint iGridNumber = getGridNumber();\n\t\tfor(int iIdx = 0; iIdx < iGridNumber; iIdx++) {\n\t\t\tFlexArray<double> *pfaYminsTmp = new FlexArray<double>;\n\t\t\tFlexArray<double> *pfaYmins = getYmins(iIdx);\n\t\t\tdouble dYmin = GM_MOVINGAVERAGE_INVALID_VALUE;\n\t\t\tfor(int iIdx1 = 0; iIdx1 < pfaYmins->getNumber(); iIdx1++) {\n\t\t\t\tdouble dValue = pfaYmins->get(iIdx1);\n\t\t\t\tif(dValue >= GM_MOVINGAVERAGE_INVALID_VALUE) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tpfaYminsTmp->add(dValue);\n\t\t\t}\n\t\t\tif(pfaYminsTmp->getNumber() < 3) {\n\t\t\t\tdYmin = GM_MOVINGAVERAGE_INVALID_VALUE;\n\t\t\t} else {\n\t\t\t\t// sorting\n\t\t\t\tfor(int iIdxA = 0; iIdxA < pfaYminsTmp->getNumber() - 1; iIdxA++) {\n\t\t\t\t\tdouble dV_A = pfaYminsTmp->get(iIdxA);\n\t\t\t\t\tfor(int iIdxB = iIdxA + 1; iIdxB < pfaYminsTmp->getNumber(); iIdxB++) {\n\t\t\t\t\t\tdouble dV_B = pfaYminsTmp->get(iIdxB);\n\t\t\t\t\t\tif(dV_A > dV_B) {\n\t\t\t\t\t\t\tpfaYminsTmp->set(iIdxA, dV_B);\n\t\t\t\t\t\t\tpfaYminsTmp->set(iIdxB, dV_A);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// median value\n\t\t\t\tdYmin = pfaYminsTmp->get(pfaYminsTmp->getNumber() / 2);\n\t\t\t}\n\t\t\tsetMovingAverage(iIdx, dYmin);\n\t\t\tSAFE_DELETE(pfaYminsTmp);\n\t\t}\n\t\treturn TRUE;\n\t}\n\tinline BOOL searchStep(double dTh) {\n\t\tint iCenter = getGridNumber() / 2;\n\t\tdouble dYPrev = getMovingAverage(iCenter);\n\t\tsetStepFlag(iCenter, FALSE);\n\t\tfor(int iIdx = iCenter - 1; iIdx >= 0; iIdx--) {\n\t\t\tsetStepFlag(iIdx, FALSE);\n\t\t\tdouble dY = getMovingAverage(iIdx);\n\t\t\tif(\tdYPrev\t!= GM_MOVINGAVERAGE_INVALID_VALUE\n\t\t\t&&\tdY\t\t!= GM_MOVINGAVERAGE_INVALID_VALUE) {\n\t\t\t\tif(\t(\t(-(dY - dYPrev) >= dTh)\n\t\t\t\t\t&&\t(-dY >= dTh)\n\t\t\t\t\t)\n\t\t\t\t||\t(-dY >= dTh * 2)\n\t\t\t\t) {\n\t\t\t\t\tsetStepFlag(iIdx, TRUE);\n\t\t\t\t}\n\t\t\t}\n\t\t\tdYPrev = dY;\n\t\t}\n\t\tdYPrev = 
getMovingAverage(iCenter);\n\t\tfor(int iIdx = iCenter + 1; iIdx < getGridNumber(); iIdx++) {\n\t\t\tsetStepFlag(iIdx, FALSE);\n\t\t\tdouble dY = getMovingAverage(iIdx);\n\t\t\tif(\tdYPrev\t!= GM_MOVINGAVERAGE_INVALID_VALUE\n\t\t\t&&\tdY\t\t!= GM_MOVINGAVERAGE_INVALID_VALUE) {\n\t\t\t\tif(\t(\t(-(dY - dYPrev) >= dTh)\n\t\t\t\t\t&&\t(-dY >= dTh)\n\t\t\t\t\t)\n\t\t\t\t||\t(-dY >= dTh * 2)\n\t\t\t\t) {\n\t\t\t\t\tsetStepFlag(iIdx, TRUE);\n\t\t\t\t}\n\t\t\t}\n\t\t\tdYPrev = dY;\n\t\t}\n\t\treturn TRUE;\n\t}\n};\n"
},
{
"alpha_fraction": 0.4245535731315613,
"alphanum_fraction": 0.5223214030265808,
"avg_line_length": 18.64912223815918,
"blob_id": "0471a117028e3c7dceb64c1d0fae32935aba7f0c",
"content_id": "5a4a0c853df16a74cea4087de969db8407adb8f8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2240,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 114,
"path": "/athena/core/x86/Planning/include/common/color_util.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _COLOR_UTIL\n#define _COLOR_UTIL\n\n#include \"math_util.h\"\n\nstatic int ColorTableShot[8][3]=\n{\n// {255,255,255},\n {255,0,122},\n {255,255,0},\n {255,0,255},\n {0,255,255},\n {0,0,255},\n {255,122,0},\n// {255,0,0},\n {0,255,0},\n {139,0,139}\n\n};\n\nstatic int ColorTableCircle[16][3]=\n{\n {255,255,255},\n {255,122,255},\n {255,255,0},\n {255,122,0},\n {255,0,255},\n {255,0,122},\n {0,255,255},\n {0,255,122},\n {0,0,255},\n {0,0,122},\n {255,0,0},\n {122,0,0},\n {0,122,0},\n {0,255,0},\n {139,0,139},\n {139,122,139}\n\n};\n\n#define JET_COLORS_LUT_SIZE 1024\nstatic float jet_colors[JET_COLORS_LUT_SIZE][3];\nstatic int jet_colors_initialized = 0;\n\nstatic inline void color_util_rand_color(float f[4],\n double alpha,\n double min_intensity)\n{\n f[3] = alpha;\n\nagain:\n f[0] = randf();\n f[1] = randf();\n f[2] = randf();\n\n float v = f[0] + f[1] + f[2];\n\n // reject colors that are too dark\n if (v < min_intensity)\n goto again;\n}\n\n/** Given an array of colors, a palette is created that linearly interpolates through all the colors. 
**/\nstatic void color_util_build_color_table(double color_palette[][3],\n int palette_size,\n float lut[][3],\n int lut_size)\n{\n for (int idx = 0; idx < lut_size; idx++)\n {\n double znorm = ((double) idx) / lut_size;\n\n int color_index = (palette_size - 1) * znorm;\n double alpha = (palette_size - 1) * znorm - color_index;\n\n for (int i = 0; i < 3; i++)\n {\n lut[idx][i] = color_palette[color_index][i] * (1.0 - alpha) + color_palette[color_index+1][i]*alpha;\n }\n }\n}\n\n\nstatic void init_color_table_jet()\n{\n double jet[][3] = {{ 0, 0, 1 },\n { 0, .5, .5 },\n { .8, .8, 0 },\n { 1, 0, 0 }\n };\n\n color_util_build_color_table(\n jet,\n sizeof(jet)/(sizeof(double)*3),\n jet_colors,\n JET_COLORS_LUT_SIZE);\n\n jet_colors_initialized = 1;\n}\n\nstatic inline float *color_util_jet(double v)\n{\n if (!jet_colors_initialized)\n init_color_table_jet();\n\n v = fmax(0, v);\n v = fmin(1, v);\n\n int idx = (JET_COLORS_LUT_SIZE - 1) * v;\n return jet_colors[idx];\n}\n\n#endif\n"
},
{
"alpha_fraction": 0.5472555756568909,
"alphanum_fraction": 0.5599220395088196,
"avg_line_length": 24.23770523071289,
"blob_id": "ebc443142956f8b90af559acc191bb872afe6f9e",
"content_id": "44bb0b79154360ea2dc03e4720fed3fe3c2aa9d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4535,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 122,
"path": "/athena/core/arm/Planning/include/spline/math_tools.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 该库提供了一些数学计算工具,\n * 如角度弧度的转换,方向角的计算,局部坐标系和全局坐标系之间的转换。\n */\n\n #ifndef MATH_TOOLS_H\n #define MATH_TOOLS_H\n\n\n#include <math.h>\n//#include <vector>\n#include \"common/navi_point.h\"\n\n\n/**\n* @brief 角度转弧度。\n* @param d 输入量:角度。\n* @return 弧度。\n*/\ndouble d2r( double d );\n\n/**\n* @brief 弧度转角度。\n* @param r 输入量:弧度。\n* @return 角度。\n*/\ndouble r2d( double r );\n\n/**\n* @brief 计算从点(x1,y1)到点(x2,y2)的方向角。\n* @param x1 输入量:起点的x坐标。\n* @param y1 输入量:起点的y坐标。\n* @param x2 输入量:终点的x坐标。\n* @param y2 输入量:终点的y坐标。\n* @return 从点(x1,y1)到点(x2,y2)的方向角。\n*/\ndouble get_heading( double x1, double y1,\n double x2, double y2 );\n\n/**\n * @class Transfer\n * @brief 局部坐标系和全局坐标系之间的转换。\n */\nclass Transfer\n{\n public:\n /**\n * @brief 构造函数\n */\n Transfer();\n /**\n * @brief 析构函数\n */\n virtual ~Transfer();\n\n /**\n * @brief 设置局部坐标系的原点坐标和y轴指向,\n * 以(x0,y0)为原点,angle为x轴指向,建立坐标系。\n * @param x0 输入量:局部坐标系原点x坐标(全局坐标系下)。\n * @param y0 输入量:局部坐标系原点y坐标(全局坐标系下)。\n * @param angle 输入量:局部坐标系y轴指向。\n */\n int set_origin( double x0, double y0, double angle );\n\n /**\n * @brief 将局部坐标系的坐标转换到全局坐标系下。\n * @param x_local 输入量:局部坐标系x坐标\n * @param y_local 输入量:局部坐标系y坐标\n * @param x_global 输出量:全局坐标系x坐标\n * @param y_global 输出量:全局坐标系y坐标\n */\n int transfer_l2g( double x_local, double y_local, double &x_global, double &y_global );\n\n /**\n * @brief 将局部坐标系的坐标及方向角转换到全局坐标系下。\n * @param x_local 输入量:局部坐标系x坐标\n * @param y_local 输入量:局部坐标系y坐标\n * @param h_local 输入量:局部坐标系方向角\n * @param x_global 输出量:全局坐标系x坐标\n * @param y_global 输出量:全局坐标系y坐标\n * @param h_global 输出量:全局坐标系方向角\n */\n int transfer_l2g( double x_local, double y_local, double h_local,\n double &x_global, double &y_global, double &h_global );\n\n /**\n * @brief 将局部坐标系的导航点坐标,航向角和曲率转换到全局坐标系下。\n * @param p_local 输入量:局部坐标系的导航点\n * @param p_global 输出量:全局坐标系的导航点\n */\n int transfer_l2g( navi_point p_local, navi_point &p_global );\n\n /**\n * @brief 将全局坐标系的导航点坐标,航向角和曲率转换到局部坐标系下。\n * 全局坐标系下两点,以其中一点为原点,两点连线为x轴,建立局部坐标系,\n * 
并将两点的全局坐标转换到该局部坐标系下。\n * @param length 输入量:两点之间的欧几里得距离。\n * @param p0_global 输入量:全局坐标系下起点。\n * @param p1_global 输入量:全局坐标系下终点。\n * @param p0_local 输出量:局部坐标系下起点。\n * @param p1_local 输出量:局部坐标系下终点。\n */\n int transfer_g2l( double length,\n navi_point p0_global, navi_point p1_global,\n navi_point &p0_local, navi_point &p1_local );\n\n /**\n * @brief 将全局坐标系的导航点坐标,航向角和曲率转换到局部坐标系下。\n * @param point_global 输入量:全局坐标系下起点。\n * @param point_local 输出量:局部坐标系下终点。\n */\n int transfer_g2l( navi_point point_global, navi_point &point_local );\n\n private:\n double m_x0_; ///<成员变量:局部坐标的坐标原点在全局坐标系下的x值。\n double m_y0_; ///<成员变量:局部坐标的坐标原点在全局坐标系下的y值。\n double m_angle_; ///<成员变量:局部坐标系y轴指向。\n};\n\n#endif // MATH_TOOLS_H\n"
},
{
"alpha_fraction": 0.6760627627372742,
"alphanum_fraction": 0.6814911961555481,
"avg_line_length": 36.94044494628906,
"blob_id": "e128fce573c9a00867b30acedfa54fd46619f5e9",
"content_id": "636522912086979c1aaffe833d489fd164ac25f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 16346,
"license_type": "no_license",
"max_line_length": 495,
"num_lines": 403,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/control_logic.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"control_logic.h\"\n\nnamespace athena\n{\nnamespace control\n{\n\nvoid ControlLogic::Init(ControlLogicConfig control_logic_config)\n{\n\n control_logic_config_ = control_logic_config;\n\n switch (control_logic_config_.vehicle_type_)\n {\n case CS55:\n {\n controller_config_.get_acc_value_callback_ = CS55GetAccValue;\n controller_config_.get_brake_value_callback_ = CS55GetBrakeVaule;\n //档位控制\n RegisterGearControl(&cs55_gear_control_);\n }\n break;\n case TRUCK_J6P:\n controller_config_.get_acc_value_callback_ = TruckJ6pGetAccValue;\n controller_config_.get_brake_value_callback_ = TruckJ6pGetBrakeVaule;\n //档位控制\n RegisterGearControl(&truck_j6p_gear_control_);\n break;\n default:\n break;\n }\n\n controller_config_.get_log_callback_ = Logging::LogInfo;\n controller_config_.get_current_time_callback_ = GetTime::GetGpsCurrentTime;\n\n if (control_logic_config_.debug_driving_mode_ == 1)\n {\n ///转向工作模式 1 = 自动驾驶 0 = 非自动驾驶\n controller_config_.steering_driving_mode_ = control_logic_config_.debug_steering_driving_mode_;\n ///纵向控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n controller_config_.acc_driving_mode_ = control_logic_config_.debug_acc_driving_mode_;\n ///刹车控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n controller_config_.brake_driving_mode_ = control_logic_config_.debug_brake_driving_mode_;\n ///EPB控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n controller_config_.epb_driving_mode_ = control_logic_config_.debug_epb_driving_mode_;\n }\n\n ///前轮侧偏刚度\n controller_config_.cf_ = control_logic_config_.cf_;\n ///后轮侧偏刚度\n controller_config_.cr_ = control_logic_config_.cr_;\n ///车辆重量\n controller_config_.vehicle_weight_ = control_logic_config_.vehicle_weight_;\n\n ///车辆高度\n controller_config_.h_ = control_logic_config_.h_;\n ///车长\n controller_config_.vehicle_length_ = control_logic_config_.vehicle_length_;\n\n ///车辆宽度\n controller_config_.vehicle_width_ = control_logic_config_.vehicle_width_;\n ///轮距\n controller_config_.wheelbase_ = control_logic_config_.wheelbase_;\n ///前轮轴距\n controller_config_.lf_ = 
control_logic_config_.lf_;\n ///后轮轴距\n controller_config_.lr_ = control_logic_config_.lr_;\n ///车轮半径\n controller_config_.wheel_radius_ = control_logic_config_.wheel_radius_;\n ///转向比\n controller_config_.steer_tranmission_ratio_ = control_logic_config_.steer_tranmission_ratio_;\n ///最大刹车值\n controller_config_.max_brake_value_ = control_logic_config_.max_brake_value_;\n ///怠速状态最大减速度\n controller_config_.max_deceleration_in_idle_ = control_logic_config_.max_deceleration_in_idle_;\n ///横向控制PID动态kp\n controller_config_.moving_kp_ = control_logic_config_.moving_kp_;\n ///横向控制PID调节P值\n controller_config_.lat_kp_ = control_logic_config_.lat_kp_;\n ///横向控制PID调节I值\n controller_config_.lat_ki_ = control_logic_config_.lat_ki_;\n ///横向控制PID调节D值\n controller_config_.lat_kd_ = control_logic_config_.lat_kd_;\n ///纵向控制PID调节P值\n controller_config_.lon_kp_ = control_logic_config_.lon_kp_;\n ///纵向控制PID调节I值\n controller_config_.lon_ki_ = control_logic_config_.lon_ki_;\n ///纵向控制PID调节D值\n controller_config_.lon_kd_ = control_logic_config_.lon_kd_;\n ///地图坐标原点纬度\n controller_config_.origin_lat_ = control_logic_config_.origin_lat_;\n ///地图坐标原点经度\n controller_config_.origin_lon_ = control_logic_config_.origin_lon_;\n ///位置误差门限值\n controller_config_.max_position_error_ = control_logic_config_.max_position_error_;\n ///最大转向角\n controller_config_.max_steering_angle_ = control_logic_config_.max_steering_angle_;\n ///最小转向角\n controller_config_.min_steering_angle_ = control_logic_config_.min_steering_angle_;\n ///限速\n controller_config_.vechile_speed_max_ = control_logic_config_.vechile_speed_max_;\n\n //动态kp值变化 suggest_kp = steer_angle/kp_slope_ + kp_value_\n controller_config_.kp_slope_ = control_logic_config_.kp_slope_;\n controller_config_.kp_value_ = control_logic_config_.kp_value_;\n ///预描距离\n controller_config_.xla_ = control_logic_config_.xla_;\n ///位置误差比重\n controller_config_.k_e_err_ = control_logic_config_.k_e_err_;\n ///角度误差比重\n controller_config_.k_fi_err_ = 
control_logic_config_.k_fi_err_;\n ///总误差比重\n controller_config_.k_ela_ = control_logic_config_.k_ela_;\n\n\n\n controller_config_.controller_switch_ = control_logic_config_.controller_switch_;\n\n controller_config_.lqr_ts_ = control_logic_config_.lqr_ts_;\n ///LQR预测窗口大小\n controller_config_.lqr_preview_window_ = control_logic_config_.lqr_preview_window_;\n ///LQR计算阀值\n controller_config_.lqr_eps_ = control_logic_config_.lqr_eps_;\n ///LQR滤波器窗口大小\n controller_config_.lqr_mean_filter_window_size_ = control_logic_config_.lqr_mean_filter_window_size_;\n ///LQR最大迭代次数\n controller_config_.lqr_max_iteration_ = control_logic_config_.lqr_max_iteration_;\n ///LQR横向最大加速度\n controller_config_.lqr_max_lateral_acceleration_ = control_logic_config_.lqr_max_lateral_acceleration_;\n ///最小速度保护\n controller_config_.lqr_minimum_speed_protection_ = control_logic_config_.lqr_minimum_speed_protection_;\n ///\n controller_config_.lqr_cutoff_freq_ = control_logic_config_.lqr_cutoff_freq_;\n ///\n controller_config_.lqr_mean_filter_window_size_ = control_logic_config_.lqr_mean_filter_window_size_;\n controller_config_.lqr_matrix_q_.assign(control_logic_config_.lqr_matrix_q_.begin(),control_logic_config_.lqr_matrix_q_.end());\n controller_config_.lqr_lat_err_scheduler_init_.assign(control_logic_config_.lqr_lat_err_scheduler_init_.begin(),control_logic_config_.lqr_lat_err_scheduler_init_.end());\n controller_config_.lqr_heading_err_scheduler_init_.assign(control_logic_config_.lqr_heading_err_scheduler_init_.begin(),control_logic_config_.lqr_heading_err_scheduler_init_.end());\n\n if (controller_.Init(controller_config_) == false)\n {\n Logging::LogInfo(Logging::ERROR, \"controller init error\");\n }\n}\n\nvoid ControlLogic::RegisterGearControl(GearControl *gear_control)\n{\n gear_control_ = gear_control;\n}\n\nvoid ControlLogic::SubscribeTrajectory(Trajectory trajectory)\n{\n trajectory_.points_.clear();\n trajectory_ = trajectory;\n controller_.SetTrajectory(&trajectory_);\n}\n\nvoid 
ControlLogic::SubscribeLocalization(Localization localization)\n{\n localization_ = localization;\n}\n\nvoid ControlLogic::SubscribeChassis(ChassisDetail chassis_detail)\n{\n chassis_detail_ = chassis_detail;\n chassis_.car_speed_ = chassis_detail.car_speed_;\n chassis_.steering_angle_feedback_ = chassis_detail.steering_angle_feedback_;\n //std::cout<<\"steering_angle_feedback_:\"<<chassis_.steering_angle_feedback_<<endl;\n chassis_.at_status_feedback_ = chassis_detail.at_gear_;\n chassis_.brake_value_feedback_ = chassis_detail.brake_value_feedback_;\n}\n\nvoid ControlLogic::SubscribeBcmControl(BcmControlCmd bcm_control_cmd)\n{\n bcm_control_cmd_ = bcm_control_cmd;\n}\n\nvoid ControlLogic::GetControlCmd(ControlCmd &control_cmd)\n{\n control_cmd = control_cmd_;\n}\n\nvoid ControlLogic::GetBcmControlCmd(BcmControlCmd &bcm_control_cmd)\n{\n ///喇叭状态 0 禁声 1 鸣笛\n bcm_control_cmd.speaker_control_ = bcm_control_cmd_.speaker_control_;\n ///远光灯 0 关闭 1 开启\n bcm_control_cmd.high_beam_ctrl_ = bcm_control_cmd_.high_beam_ctrl_;\n ///近光灯 0 关闭 1 开启\n bcm_control_cmd.low_beam_ctrl_ = bcm_control_cmd_.low_beam_ctrl_;\n ///左转向灯 0 关闭 1 开启\n bcm_control_cmd.left_turn_ctrl_ = bcm_control_cmd_.left_turn_ctrl_;\n ///右转向灯 0 关闭 1 开启\n bcm_control_cmd.right_turn_ctrl_ = bcm_control_cmd_.right_turn_ctrl_;\n ///前雨刮器 0 关闭 1 开启\n bcm_control_cmd.front_wiper_ctrl_ = bcm_control_cmd_.front_wiper_ctrl_;\n ///后雨刮器 0 关闭 1 开启\n bcm_control_cmd.rear_wiper_ctrl_ = bcm_control_cmd_.rear_wiper_ctrl_;\n ///位置灯 0 关闭 1 开启\n bcm_control_cmd.position_lamp_ctrl_ = bcm_control_cmd_.position_lamp_ctrl_;\n ///前雾灯 0 关闭 1 开启\n bcm_control_cmd.front_fog_lamp_ctrl_ = bcm_control_cmd_.front_fog_lamp_ctrl_;\n ///后雾灯 0 关闭 1 开启\n bcm_control_cmd.rear_fog_lamp_ctrl_ = bcm_control_cmd_.rear_fog_lamp_ctrl_;\n ///刹车灯 一般情况自动控制 0 关闭 1 开启\n bcm_control_cmd.brake_lamp_ctrl_ = bcm_control_cmd_.brake_lamp_ctrl_;\n ///警报灯 双闪 0 关闭 1 开启\n bcm_control_cmd.alarm_lamp_ctrl_ = bcm_control_cmd_.alarm_lamp_ctrl_;\n /// 左前门控制 0 关闭 1 开启\n 
bcm_control_cmd.lf_door_ctrl_ = bcm_control_cmd_.lf_door_ctrl_;\n /// 右前门控制 0 关闭 1 开启\n bcm_control_cmd.rf_door_ctrl_ = bcm_control_cmd_.rf_door_ctrl_;\n /// 左后门控制 0 关闭 1 开启\n bcm_control_cmd.lr_door_ctrl_ = bcm_control_cmd_.lr_door_ctrl_;\n /// 右后门控制 0 关闭 1 开启\n bcm_control_cmd.rr_door_ctrl_ = bcm_control_cmd_.rr_door_ctrl_;\n}\n\nvoid ControlLogic::GetControlInfoReport(ControlInfoReport &control_info_report)\n{\n control_info_report.cur_lon_ = localization_.lon_;\n control_info_report.cur_lat_ = localization_.lat_;\n\n control_info_report.cur_yaw_ = localization_.heading_;\n control_info_report.cur_brake_ = chassis_.brake_value_feedback_;\n control_info_report.cur_speed_ = chassis_.car_speed_;\n control_info_report.cur_speed_lateral_ = localization_.lateral_speed_;\n control_info_report.cur_speed_longitudinal_ = localization_.longitudinal_speed_;\n ///加速度模式\n control_info_report.cur_acceleration_pattern_ = 0;\n control_info_report.cur_acceleration_ = sqrt(pow(localization_.lateral_accelerate_, 2) + pow(localization_.longitudinal_accelerate_, 2));\n ;\n control_info_report.cur_acceleration_lateral_ = localization_.lateral_accelerate_;\n control_info_report.cur_acceleration_longitudinal_ = localization_.longitudinal_accelerate_;\n control_info_report.steering_angle_ = controller_output_.steering_angle_;\n\n control_info_report.gps_time_ = localization_.gps_time_;\n}\n\nvoid ControlLogic::ComputeControlOutputOnTimer()\n{\n controller_.ComputeControlOutput(&localization_, &chassis_, &controller_output_);\n SetControlCmd(&control_cmd_, controller_output_);\n}\n\ndouble ControlLogic::GetSteeringAngleFeedback()\n{\n return chassis_.steering_angle_feedback_;\n}\n\nvoid ControlLogic::run()\n{\n while (1)\n {\n ComputeControlOutputOnTimer();\n //10ms\n usleep(20000);\n }\n}\n\nbool ControlLogic::GetControllerAlarmInfo(Emergency *emergency)\n{\n bool emergency_enable = false;\n std::vector<ControllerOutputAlarm::AlarmInfoTable> alarm_list;\n 
controller_.GetAlarmTableInfo(&alarm_list);\n\n for (auto it = alarm_list.begin(); it != alarm_list.end(); it++)\n {\n std::string info = \"alarm_code:\" + std::to_string(it->alarm_info.alarm_code) + \"\\n\" + \"alarm_level:\" + std::to_string(it->alarm_info.alarm_level) + \"\\n\" + \"timer_stamp:\" + std::to_string(it->timer_stamp.year) + \"-\" + std::to_string(it->timer_stamp.month) + \"-\" + std::to_string(it->timer_stamp.day) + \"-\" + std::to_string(it->timer_stamp.hour) + \"-\" + std::to_string(it->timer_stamp.minute) + \"-\" + std::to_string(it->timer_stamp.second) + \":\" + std::to_string(it->timer_stamp.m_second);\n\n if (it->alarm_info.alarm_level == ControllerAlarmCodeLevel::NORMAL)\n {\n Logging::LogInfo(Logging::INFO, info);\n }\n\n if ((it->alarm_info.alarm_level == ControllerAlarmCodeLevel::LOW_WARNING) || (it->alarm_info.alarm_level == ControllerAlarmCodeLevel::HIGH_WARNING))\n {\n Logging::LogInfo(Logging::WARNING, info);\n }\n\n if (it->alarm_info.alarm_level == ControllerAlarmCodeLevel::ERROR)\n {\n Logging::LogInfo(Logging::ERROR, info);\n emergency_enable = true;\n }\n }\n\n if (emergency_enable == true)\n {\n emergency->emergency_mode_ = Emergency::EMERGENCY_BRAKING;\n emergency->emergency_level_ = Emergency::ALL_SITUATION;\n emergency->emergency_value_ = control_logic_config_.max_brake_value_;\n }\n\n return emergency_enable;\n}\n\nvoid ControlLogic::SetDrivingMode(int driving_mode)\n{\n if (driving_mode == HUMAN_DRIVING_MODE)\n {\n driving_mode_ = HUMAN_DRIVING_MODE;\n return;\n }\n //底层错误\n if (chassis_detail_.chassis_error_ == 1)\n {\n driving_mode_ = HUMAN_DRIVING_MODE;\n Logging::LogInfo(Logging::WARNING, \"chassis error\");\n return;\n }\n\n if (chassis_detail_.steering_driving_mode_feedback_ == UNCONTROLLABLE)\n {\n driving_mode_ = HUMAN_DRIVING_MODE;\n Logging::LogInfo(Logging::WARNING, \"steering_driving_mode_feedback_ uncontrollable\");\n return;\n }\n\n if (chassis_detail_.acc_driving_mode_feedback_ == UNCONTROLLABLE)\n {\n 
driving_mode_ = HUMAN_DRIVING_MODE;\n Logging::LogInfo(Logging::WARNING, \"acc_driving_mode_feedback_ uncontrollable\");\n return;\n }\n\n if (chassis_detail_.brake_driving_mode_feedback_ == UNCONTROLLABLE)\n {\n driving_mode_ = HUMAN_DRIVING_MODE;\n Logging::LogInfo(Logging::WARNING, \"brake_driving_mode_feedback_ uncontrollable\");\n return;\n }\n\n if (chassis_detail_.epb_driving_mode_feedback_ == UNCONTROLLABLE)\n {\n driving_mode_ = HUMAN_DRIVING_MODE;\n Logging::LogInfo(Logging::WARNING, \"epb_driving_mode_feedback_ uncontrollable\");\n return;\n }\n\n driving_mode_ = driving_mode;\n}\n\nint32_t ControlLogic::GetDrivingMode()\n{\n return driving_mode_;\n}\n\nvoid ControlLogic::SetControlCmd(ControlCmd *control_cmd, ControllerOutput controller_output)\n{\n // SetDrivingMode(int driving_mode);\n ///方向盘转角,单位:度\n control_cmd->steering_angle_ = controller_output.steering_angle_;\n ///方向盘角速度,单位:度/s\n control_cmd->steering_angle_speed_ = controller_output.steering_angle_speed_;\n ///驾驶模式:人工驾驶 1、自动驾驶 3 和辅助驾驶 2\n SetDrivingMode(controller_output.steering_driving_mode_);\n control_cmd->steering_driving_mode_ = GetDrivingMode();\n ///节气门开度\n control_cmd->acc_value_ = controller_output.acc_value_;\n ///驾驶模式:人工驾驶 1、自动驾驶 3 和辅助驾驶 2\n SetDrivingMode(controller_output.acc_driving_mode_);\n control_cmd->accelerate_driving_mode_ = GetDrivingMode();\n ///刹车\n control_cmd->brake_value_ = controller_output.brake_value_;\n ///驾驶模式:人工驾驶 1、自动驾驶 3 和辅助驾驶 2\n SetDrivingMode(controller_output.brake_driving_mode_);\n control_cmd->brake_driving_mode_ = GetDrivingMode();\n ///EPB状态\n control_cmd->epb_enable_ = controller_output.epb_status_;\n ///驾驶模式:人工驾驶 1、自动驾驶 3 和辅助驾驶 2\n SetDrivingMode(controller_output.brake_driving_mode_);\n control_cmd->epb_driving_mode_ = GetDrivingMode();\n ///档位控制\n control_cmd->gear_lever_ = gear_control_->GetGearLevel();\n}\n\nvoid ControlLogic::GetControlLogicDebugOutput(ControlLogicDebugOutput &control_logic_debug_output)\n{\n DebugOutput debug_output;\n 
controller_.GetControllerInfo(debug_output);\n control_logic_debug_output_.lon_controller_tar_speed_ = debug_output.tar_speed_;\n control_logic_debug_output = control_logic_debug_output_;\n control_logic_debug_output.chassis_detail_output_ = chassis_detail_;\n control_logic_debug_output.localization_output_ = localization_;\n}\n\nvoid ControlLogic::GetControllerInfo(DebugOutput &debug_output)\n{\n controller_.GetControllerInfo(debug_output);\n}\n\nvoid ControlLogic::SetDrivingModeDebug(int32_t mode)\n{\n controller_.SetDrivingModeDebug(mode);\n}\n\nvoid ControlLogic::SetTarSpeedDebug(int32_t tar_speed,bool valid)\n{\n controller_.SetTarSpeedDebug(tar_speed,valid);\n}\n} // namespace control\n} // namespace athena\n"
},
{
"alpha_fraction": 0.6388673186302185,
"alphanum_fraction": 0.6431342363357544,
"avg_line_length": 22.01785659790039,
"blob_id": "6d6bf7ebe9b088aeb4c775280d953ef0c05962de",
"content_id": "b2a6595411e184376f91dda06aaa8aa9ceb063f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2704,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 112,
"path": "/athena/examples/LCM/Singlecar/control/apps/message_manger/lcm/lcm_message_manger.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file lcm_message_manger.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef APPS_MESSAGE_MANGER_LCM_LCM_MESSAGE_MANGER_H_\n#define APPS_MESSAGE_MANGER_LCM_LCM_MESSAGE_MANGER_H_\n\n#include \"../message_manger.h\"\n#include <lcm/lcm.h>\n#include <lcm/lcm-cpp.hpp>\n#include \"msgs/obu_lcm/mt_info_report.hpp\"\n#include \"msgs/obu_lcm/ins_info.hpp\"\n#include \"msgs/obu_lcm/vehicle_info.hpp\"\n#include \"msgs/obu_lcm/control_cmd.hpp\"\n#include \"msgs/obu_lcm/mt_bcm_control_cmd.hpp\"\n#include \"msgs/obu_lcm/control_info_report.hpp\"\n#include \"msgs/obu_lcm/emergency.hpp\"\n#include \"msgs/obu_lcm/bcm_control_cmd.hpp\"\n#include \"../../../common/Thread.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\n\nnamespace athena{\nnamespace control{\n\n /**\n * @class LcmMessageManger\n * @brief LCM消息管理器.\n */\nclass LcmMessageManger:public MessageManger,public Thread{\n public:\n LcmMessageManger() = default;\n ~LcmMessageManger() = default;\n\n /**\n * @brief 初始化。\n * @param[in] obu_url LCM组播信息.\n * @return void.\n */\n void Init(string obu_url,ControlLogic *control_logic);\n\n /**\n * @brief 控制消息发布.\n * @param[in] control_cmd 控制命令输出.\n * @return void.\n */\n void PublishControlCmd(ControlCmd control_cmd);\n\n /**\n * @brief 控制消息上报.\n * @param[in] control_info_report 控制信息.\n * @return void.\n */\n void PublishControlInfoReport(ControlInfoReport control_info_report);\n\n /**\n * @brief BCM控制信息发送.\n * @param[in] bcm_control_cmd BCM控制信息.\n * @return void.\n */\n void PublishBcmControlCmd(BcmControlCmd bcm_control_cmd);\n\n /**\n * @brief 紧急事件消息发送.\n * @param[in] emergency 紧急事件.\n * @return void.\n */\n void PublishEmergencyCmd(Emergency emergency);\n\n private:\n ControlLogic *control_logic_;\n lcm::LCM* lcm_;\n\n void HandleLocalizationMessage(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::ins_info * msg);\n\n void 
HandleChassisDetailMessage(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::vehicle_info* msg);\n\n\n void HandleTrajectoryMessage(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::mt_info_report* msg);\n\n void HandleMtBcmControlMessage(\n const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::mt_bcm_control_cmd* msg);\n\n /**\n * @brief 线程运行函数.\n * @return void.\n */\n void run();\n};\n}\n}\n\n#endif // APPS_MESSAGE_MANGER_LCM_LCM_MESSAGE_MANGER_H_\n"
},
{
"alpha_fraction": 0.6660839319229126,
"alphanum_fraction": 0.6975524425506592,
"avg_line_length": 28.947368621826172,
"blob_id": "e6853cc631535011a2ac0979b0f312a5262ce696",
"content_id": "cd23378da9e80cd6aed5793d69519e6821f8e3d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 890,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 19,
"path": "/athena/examples/LCM/Singlecar/launch_car/conf/simulate/readme.txt",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "simulate.xml格式:\n\n1、可以模拟的对象:\n obu_list: 模拟自动驾驶车(带obu_planning的车)\n car_list: 模拟社会车辆\n rsd_list: 模拟RSD(路侧采集器)\n people_list: 模拟行人\n block_list: 模拟物理施工牌\n\n2、配置参数:\n <obu name=\"鄂A DF001\" route=\"key\" start=\"2\" speed=\"0\" option=\"true_motion\"/>\n route=\"key\" : 采用同目录下key.route中定义的点\n start=\"2\" : 采用key.route第2行定义的点\n speed=\"0\" : 默认速度是0km/h,另,random(20)表示0-20随机选择一个速度\n option=\"true_motion\" : 如果添加true_motion,表示带了motion和controller一起调试,否则sim_system模拟motion\n\n3、地址文件:\n 默认有一个key.route的地址文件,保存主要的地址\n 可以在nad2.osm中编辑自己的地址或路径,用NadCoder.exe工具生成route文件\n "
},
{
"alpha_fraction": 0.5339113473892212,
"alphanum_fraction": 0.5610790252685547,
"avg_line_length": 21.467533111572266,
"blob_id": "29e8a865362c49d16319d33a1da9f3b49ef6de71",
"content_id": "aca8a629b0943d753cab8e9c717468d7d70deba3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6132,
"license_type": "no_license",
"max_line_length": 144,
"num_lines": 231,
"path": "/athena/core/x86/Control/include/controller_config.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file controller_config.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROLLER_CONFIG_H_\n#define CONTROLLER_CONFIG_H_\n\n#include <functional>\n#include <vector>\n#include \"scheduler.h\"\n\n//using namespace std;\n\n/**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena\n{\nnamespace control\n{\n\n///时间格式\ntypedef struct\n{\n ///年\n int32_t year;\n ///月\n int32_t month;\n ///日\n int32_t day;\n ///时\n int32_t hour;\n ///分\n int32_t minute;\n ///秒\n int32_t second;\n ///毫秒\n int32_t m_second;\n} Tm;\n\n/**\n * @class ControllerConfig\n *\n * @brief ControllerConfig.\n */\nclass ControllerConfig\n{\n public:\n ControllerConfig()\n {\n steering_driving_mode_ = 0;\n acc_driving_mode_ = 0;\n brake_driving_mode_ = 0;\n epb_driving_mode_ = 0;\n cf_ = 0.0;\n cr_ = 0.0;\n vehicle_weight_ = 0.0;\n vehicle_length_ = 0.0;\n lf_ = 0.0;\n lr_ = 0.0;\n h_ = 0.0;\n vehicle_width_ = 0.0;\n wheel_radius_ = 0.0;\n wheelbase_ = 0.0;\n steer_tranmission_ratio_ = 0.0;\n max_brake_value_ = 0.0;\n max_deceleration_in_idle_ = 0.0;\n min_speed_ = 0.0;\n moving_kp_ = 0.0;\n lat_kp_ = 0.0;\n lat_ki_ = 0.0;\n lat_kd_ = 0.0;\n lon_kp_ = 0.0;\n lon_ki_ = 0.0;\n lon_kd_ = 0.0;\n origin_lat_ = 0.0;\n origin_lon_ = 0.0;\n max_position_error_ = 0.0;\n max_steering_angle_ = 0.0;\n min_steering_angle_ = 0.0;\n get_acc_value_callback_ = NULL;\n get_brake_value_callback_ = NULL;\n get_current_time_callback_ = NULL;\n\n kp_slope_ = 30.0;\n kp_value_ = 2.0;\n ///预描距离\n xla_ = 40.0;\n ///位置误差比重\n k_e_err_ = 2.1;\n ///角度误差比重\n k_fi_err_ = 1.0;\n ///总误差比重\n k_ela_ = 1.28;\n }\n ~ControllerConfig() = default;\n\n typedef enum : int32_t\n {\n INFO = 1, /**< 信息*/\n WARNING = 2, /**< 警告*/\n ERROR = 3, /**< 错误*/\n } LogLevel;\n\n ///获取节气门开度的回调函数\n typedef std::function<double(double, double)> GetAccValueCallBack;\n ///获取刹车值的回调函数\n typedef std::function<double(double)> 
GetBrakeValueCallBack;\n ///获取当前时间(UTC)的回调函数\n typedef std::function<void(int &year, int &month, int &day, int &hour, int &minute, int &second, int &millisecond)> GetCurrentTimerCallBack;\n ///日志记录回调函数 参数1=等级 详参考枚举LogLevel 参数2=打印信息\n typedef std::function<void(int, std::string)> GetLogCallBack;\n\n ///转向工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t steering_driving_mode_;\n ///纵向控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t acc_driving_mode_;\n ///刹车控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t brake_driving_mode_;\n ///EPB控制工作模式 1 = 自动驾驶 0 = 非自动驾驶\n int32_t epb_driving_mode_;\n ///前轮侧偏刚度\n double cf_;\n ///后轮侧偏刚度\n double cr_;\n ///车辆重量\n double vehicle_weight_;\n ///车长\n double vehicle_length_;\n ///前轮轴距\n double lf_;\n ///后轮轴距\n double lr_;\n ///车辆高度\n double h_;\n ///前轮轮距\n double vehicle_width_;\n ///轴距离\n double wheelbase_;\n ///车轮半径\n double wheel_radius_;\n ///转向比\n double steer_tranmission_ratio_;\n ///最大刹车值\n double max_brake_value_;\n ///怠速状态最大减速度\n double max_deceleration_in_idle_;\n ///车辆最小速度\n double min_speed_;\n ///横向控制动态kp\n double moving_kp_;\n ///横向控制PID调节P值\n double lat_kp_;\n ///横向控制PID调节I值\n double lat_ki_;\n ///横向控制PID调节D值\n double lat_kd_;\n ///纵向控制PID调节P值\n double lon_kp_;\n ///纵向控制PID调节I值\n double lon_ki_;\n ///纵向控制PID调节D值\n double lon_kd_;\n ///地图坐标原点纬度\n double origin_lat_;\n ///地图坐标原点经度\n double origin_lon_;\n ///位置误差门限值\n double max_position_error_;\n ///最大转向角\n double max_steering_angle_;\n ///最小转向角\n double min_steering_angle_;\n ///限速值 m/s\n double vechile_speed_max_;\n\n //动态kp值变化 suggest_kp = steer_angle/kp_slope_ + kp_value_\n double kp_slope_;\n double kp_value_;\n ///预描距离\n double xla_;\n ///位置误差比重\n double k_e_err_;\n ///角度误差比重\n double k_fi_err_;\n ///总误差比重\n double k_ela_;\n\n ///获取节气门开度的回调函数\n GetAccValueCallBack get_acc_value_callback_;\n ///获取刹车值的回调函数的回调函数\n GetBrakeValueCallBack get_brake_value_callback_;\n ///获取当前时间(UTC)的回调函数\n GetCurrentTimerCallBack get_current_time_callback_;\n ///日志记录回调函数.\n GetLogCallBack get_log_callback_;\n ///LQR Q加权矩阵\n 
std::vector<double> lqr_matrix_q_;\n ///控制器选择\n int32_t controller_switch_ = 0.0;\n ///LQR离散时长\n double lqr_ts_=0.01;\n ///LQR预测窗口大小\n double lqr_preview_window_=0;\n ///LQR计算阀值\n double lqr_eps_=0.01;\n ///LQR滤波器窗口大小\n double lqr_mean_filter_window_size_=10;\n ///LQR最大迭代次数\n double lqr_max_iteration_=150;\n ///LQR横向最大加速度\n double lqr_max_lateral_acceleration_=5.0;\n ///最小速度保护\n double lqr_minimum_speed_protection_=0.1;\n ///\n int32_t lqr_cutoff_freq_;\n ///横向误差调节器 避免误差过大的时候有较大调节\n std::vector<Scheduler> lqr_lat_err_scheduler_init_;\n ///航向角误差调节器 避免误差过大的时候有较大调节\n std::vector<Scheduler> lqr_heading_err_scheduler_init_;\n};\n} // namespace control\n} // namespace athena\n\n#endif // CONTROLLER_CONFIG_H_\n"
},
{
"alpha_fraction": 0.6302003264427185,
"alphanum_fraction": 0.659476101398468,
"avg_line_length": 17.542856216430664,
"blob_id": "e527911563be63b73e474a1a886887484f5896de",
"content_id": "c4b4395af23768139d73b8fb7c0c6f1297ab8ab0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 655,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 35,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/gear/TRUCK_J6P/truck_j6p_gear_control.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file cs55_gear_control.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROL_LOGIC_GEAR_TRUCK_J6P_GEAR_CONTROL_H_\n#define CONTROL_LOGIC_GEAR_TRUCK_J6P_GEAR_CONTROL_H_\n\n#include \"../gear_control.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class Control\n * @brief 控制类.\n */\nclass TruckJ6pGearControl:public GearControl{\n public:\n TruckJ6pGearControl() = default;\n ~TruckJ6pGearControl() = default;\n\n GearLevel GetGearLevel();\n};\n}\n}\n#endif //CONTROL_LOGIC_GEAR_TRUCK_J6P_GEAR_CONTROL_H_\n"
},
{
"alpha_fraction": 0.5064795017242432,
"alphanum_fraction": 0.5291576385498047,
"avg_line_length": 15.535714149475098,
"blob_id": "ca60143862b5ebd49f891196d0113e2118829193",
"content_id": "d7c93031962ad42d4764bc28c94af11c45307150",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1086,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 56,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/oam/alarm/nad_warning.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_warning.h\n * 创建者:张毅00151602\n * 时 间:2016-10-11\n * 描 述:道路上的预警点\n-------------------------------------------------------*/\n#ifndef _NAD_WARNING_H\n#define _NAD_WARNING_H\n\n//#include \"../../msg/nad_msg.h\"\n#include \"nad_base.h\"\n#include \"route.h\"\n\nusing namespace athena;\n//地图上的一个预警点\nclass nad_warning\n{\npublic:\n string id;\n string type;\n string desc;\n double lon;\n double lat;\n double x;\n double y;\n int64_t lane_id;\n\n //构造函数\n nad_warning()\n {\n lon = lat = x = y = 0.0;\n lane_id = 0;\n }\n};\n\n//地图上的预警点列表\nclass nad_warning_list\n{\n //预警点列表\n vector<nad_warning> list;\n\n //坐标转化器\n route::coord_transfer transfer;\n\npublic:\n //从数据库中加载预警点\n void load_from_db();\n\n //把预警点绑定到route::RouteBase上\n void bind_key_point(route::RouteBase &route);\n};\n\n//获得预警点类型ID\nint get_warning_type(string type);\n\n#endif\n"
},
{
"alpha_fraction": 0.559540867805481,
"alphanum_fraction": 0.5649210810661316,
"avg_line_length": 23.883928298950195,
"blob_id": "c1201c499bd8e3479afd9f409e0b9d8a89ea11e6",
"content_id": "2acec5adf3a16c68a6343374c255a4f656535529",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2788,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 112,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerInComplexLaneBoundary.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\nconst int MaximumLaneMarkerNumberInComplexLaneBoundary = 3;\nconst int MaximumFrameNumberForComplexLaneBoundary = 10;\nconst int CLBT_NONE\t\t=\t0;\nconst int CLBT_SINGLE\t=\t1;\nconst int CLBT_DOUBLE\t=\t2;\nconst int CLBT_TRIPLE\t=\t3;\n\nclass LaneMarkerInComplexLaneBoundary\n{\nprivate:\n int _iLineType;\n LaneMarker *_pLM;\n FlexArray<int> *_faiWidth;\n FlexArray<int> *_faiPointNumber;\npublic:\n inline LaneMarkerInComplexLaneBoundary(void)\n {\n _iLineType = LBT_NONE;\n _pLM = NULL;\n _faiWidth = new FlexArray<int>;\n _faiPointNumber = new FlexArray<int>;\n }\n inline LaneMarkerInComplexLaneBoundary(LaneMarker *pLM)\n {\n _pLM = NULL;\n if(pLM != NULL)\n {\n _pLM = new LaneMarker(pLM);\n }\n _iLineType = LBT_NONE;\n _faiWidth = new FlexArray<int>;\n _faiPointNumber = new FlexArray<int>;\n }\n inline ~LaneMarkerInComplexLaneBoundary(void)\n {\n//\t\tSAFE_DELETE(_faiWidth);\n//\t\tSAFE_DELETE(_faiPointNumber);\n SAFE_DELETE(_pLM);\n delete _faiWidth;\n delete _faiPointNumber;\n }\n inline LaneMarker *getLaneMarker(void)\n {\n return _pLM;\n }\n inline void setLaneMarker(LaneMarker *pLM)\n {\n SAFE_DELETE(_pLM);\n _pLM = pLM;\n }\n inline int LineType(void)\n {\n return _iLineType;\n }\n inline void LineType(int iV)\n {\n _iLineType = iV;\n }\n inline FlexArray<int> *Widths(void)\n {\n return _faiWidth;\n }\n inline FlexArray<int> *PointNumbers(void)\n {\n return _faiPointNumber;\n }\n inline void addWidth(int iV)\n {\n if(Widths() == NULL)\treturn;\n int iNumber = Widths()->getNumber();\n if(iNumber >= MaximumFrameNumberForComplexLaneBoundary)\n {\n Widths()->remove(0);\n }\n Widths()->add(iV);\n }\n inline void addPointNumber(int iV)\n {\n if(PointNumbers() == NULL)\treturn;\n int iNumber = PointNumbers()->getNumber();\n if(iNumber >= MaximumFrameNumberForComplexLaneBoundary)\n {\n PointNumbers()->remove(0);\n }\n PointNumbers()->add(iV);\n }\n inline int AverageWidth(void)\n {\n if(Widths() == NULL)\treturn -1;\n int iNumber = 
Widths()->getNumber();\n int iSum = 0;\n for(int iIdx = 0; iIdx < iNumber; iIdx++)\n {\n iSum += Widths()->get(iIdx);\n }\n return iSum / iNumber;\n }\n inline int AveragePointNumber(void)\n {\n if(PointNumbers() == NULL)\treturn -1;\n int iNumber = PointNumbers()->getNumber();\n int iSum = 0;\n for(int iIdx = 0; iIdx < iNumber; iIdx++)\n {\n iSum += PointNumbers()->get(iIdx);\n }\n return iSum / iNumber;\n }\n\n};\n\n"
},
{
"alpha_fraction": 0.692487359046936,
"alphanum_fraction": 0.698520839214325,
"avg_line_length": 28.525861740112305,
"blob_id": "8b099635d4166f23bd54d0a0da12e889a165f5e9",
"content_id": "a46912c14a4ae8250c1c7dcfe9153eb53e9a107a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 10276,
"license_type": "no_license",
"max_line_length": 309,
"num_lines": 348,
"path": "/athena/examples/ROS/src/Perception/display/src/main.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <ros/ros.h>\n\n#include <ssd_detection/SSD_Objects.h>\n#include <lane_detect/PixelPoints.h>\n#include <sensor_msgs/Image.h>\n\n//CFG\n#include <dynamic_reconfigure/server.h>\n#include <display/DisplayConfig.h>\n\n\n//SYNC\n#include <message_filters/subscriber.h>\n#include <message_filters/synchronizer.h>\n#include <message_filters/sync_policies/approximate_time.h>\n\n\n//CV_bridge\n#include <cv_bridge/cv_bridge.h>\n#include <sensor_msgs/image_encodings.h>\n\n//OpenCV\n#include <opencv2/opencv.hpp>\n#include <opencv2/imgproc/imgproc.hpp>\n#include <opencv2/highgui/highgui.hpp>\n\n\n\nclass Display{\npublic:\n\n\ttypedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image,lane_detect::PixelPoints, ssd_detection::SSD_Objects> DisplayAllSyncPolicy;\n\ttypedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image,ssd_detection::SSD_Objects> DisplaySSDSyncPolicy;\n\ttypedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image,lane_detect::PixelPoints> DisplayLaneSyncPolicy;\n\n\n\ttypedef struct DisplayConfig{\n\t\tbool enable_show_ssd;\n\t\tint ssd_r;\n\t\tint ssd_g;\n\t\tint ssd_b;\n\t\tdouble ssd_line_width;\n\t\tbool enable_show_label;\t\t\n\n\t\tbool enable_show_lane;\n\t\tint lane_r;\n\t\tint lane_g;\n\t\tint lane_b;\n\t\tdouble lane_line_width;\n\t}DisplayConfig;\n\tDisplay(const ros::NodeHandle& nh, const ros::NodeHandle& nh_private);\n\t~Display();\n\n\n\t\nprotected:\n\tvoid onLaneDisplayCb(const sensor_msgs::Image::ConstPtr& img, const lane_detect::PixelPoints::ConstPtr& msg);\n\tvoid onSSDDisplayCb(const sensor_msgs::Image::ConstPtr& img,const ssd_detection::SSD_Objects::ConstPtr& msg);\n\tvoid onLaneSSDDisplayCb(const sensor_msgs::Image::ConstPtr& img,const lane_detect::PixelPoints::ConstPtr& lanes, const ssd_detection::SSD_Objects::ConstPtr& objects);\n\tvoid onImageCb(const sensor_msgs::Image::ConstPtr& img);\n\n\n\n\t//cfg\n\tvoid configCallback(display::DisplayConfig &config, uint32_t 
level);\n\nprivate:\n\tros::NodeHandle nh_;\n\tros::NodeHandle nh_private_;\n\n\tros::Publisher pub_warpper_image_;\n\n\n\t//CFG\n\tdynamic_reconfigure::Server<display::DisplayConfig> server_;\n \tdynamic_reconfigure::Server<display::DisplayConfig>::CallbackType cfg_cb_;\n\tDisplayConfig displayConfig_;\n\n\tmessage_filters::Subscriber<lane_detect::PixelPoints> sub_lane_detect_result_;\n\tmessage_filters::Subscriber<ssd_detection::SSD_Objects> sub_SSD_result_;\n\tmessage_filters::Subscriber<sensor_msgs::Image> sub_image_raw_;\n\n\tmessage_filters::Synchronizer<DisplayLaneSyncPolicy>* sync_lane_;\n\tmessage_filters::Synchronizer<DisplaySSDSyncPolicy>* sync_ssd_;\n\tmessage_filters::Synchronizer<DisplayAllSyncPolicy>* sync_all_;\n};\n\n\nDisplay::Display(const ros::NodeHandle& nh, const ros::NodeHandle& nh_private)\n\t:nh_(nh)\n\t,nh_private_(nh_private)\n\t,sub_lane_detect_result_(nh_,\"laneDetectPixelPoints\",10)\n\t,sub_SSD_result_(nh_,\"ssd_detection_objects\",10)\n\t,sub_image_raw_(nh_,\"/usb_cam/image_raw\",10)\n{\n\t//cfg\n\tcfg_cb_ = boost::bind(&Display::configCallback,this, _1, _2);\n \tserver_.setCallback(cfg_cb_);\n\n\t//Publisher\n\tpub_warpper_image_ = nh_.advertise<sensor_msgs::Image>(\"warpper_image\",5);\n}\n\t\nDisplay::~Display()\n{\n\t\n}\n\nvoid Display::onLaneDisplayCb(const sensor_msgs::Image::ConstPtr& img,const lane_detect::PixelPoints::ConstPtr& msg)\n{\n\tcv_bridge::CvImagePtr cv_ptr;\n \ttry\n \t{\n \t\tcv_ptr = cv_bridge::toCvCopy(img, sensor_msgs::image_encodings::BGR8);\n \t}\n \tcatch (cv_bridge::Exception& e)\n \t{\n \t\tROS_ERROR(\"cv_bridge exception: %s\", e.what());\n \t\treturn;\n \t}\n\n\t//Draw lanes\n\tstd::vector<cv::KeyPoint> points;\t\n\tfor(int i=0;i<msg->leftpoints.size();i++)\n\t{\n\t\tpoints.push_back(cv::KeyPoint(msg->leftpoints.at(i).x,msg->leftpoints.at(i).y,displayConfig_.lane_line_width));\n\t}\n\n\tfor(int 
i=0;i<msg->rightpoints.size();i++)\n\t{\n\t\tpoints.push_back(cv::KeyPoint(msg->rightpoints.at(i).x,msg->rightpoints.at(i).y,displayConfig_.lane_line_width));\n\t}\n\n\tcv::drawKeypoints(cv_ptr->image,points,cv_ptr->image,cv::Scalar(displayConfig_.lane_r,displayConfig_.lane_g,displayConfig_.lane_b));\n\n\tpub_warpper_image_.publish(cv_ptr->toImageMsg());\n\t\n}\nvoid Display::onSSDDisplayCb(const sensor_msgs::Image::ConstPtr& img,const ssd_detection::SSD_Objects::ConstPtr& msg)\n{\n\tcv_bridge::CvImagePtr cv_ptr;\n \ttry\n \t{\n \t\tcv_ptr = cv_bridge::toCvCopy(img, sensor_msgs::image_encodings::BGR8);\n \t}\n \tcatch (cv_bridge::Exception& e)\n \t{\n \t\tROS_ERROR(\"cv_bridge exception: %s\", e.what());\n \t\treturn;\n \t}\n\n\tfor(int i=0;i<msg->objects.size();i++)\n\t{\n\t\tcv::rectangle(cv_ptr->image,cv::Rect(msg->objects.at(i).obj_rect.pointf.x,msg->objects.at(i).obj_rect.pointf.y,msg->objects.at(i).obj_rect.width,msg->objects.at(i).obj_rect.height),cv::Scalar(displayConfig_.ssd_r,displayConfig_.ssd_g,displayConfig_.ssd_b),displayConfig_.ssd_line_width,1,0);\n\n\t\tif(displayConfig_.enable_show_label)\n\t\t{\n\t\t\tchar* label = NULL;\n\t\t\tswitch(msg->objects.at(i).classification)\n\t\t\t{\n\t\t\t\tcase 7:\n\t\t\t\t\tlabel = \"car\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase 15:\n\t\t\t\t\tlabel = \"person\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase 6:\n\t\t\t\t\tlabel = \"bus\";\n\t\t\t\t\tbreak;\n\t\t\t\tdefault:\n\t\t\t\t\tbreak;\n\t\t\t\t\t\n\t\t\t}\n\t\t\t//cv::putText(cv_ptr->image,std::string(label), cv::Point(0,0), 1.0,cv::FONT_HERSHEY_PLAIN, cv::Scalar(displayConfig_.ssd_r,displayConfig_.ssd_g,displayConfig_.ssd_b));\n\t\t}\n\t}\n\n\tpub_warpper_image_.publish(cv_ptr->toImageMsg());\n\t\n}\n\n\n\nvoid Display::onLaneSSDDisplayCb(const sensor_msgs::Image::ConstPtr& img,const lane_detect::PixelPoints::ConstPtr& lanes, const ssd_detection::SSD_Objects::ConstPtr& objects)\n{\n\tcv_bridge::CvImagePtr cv_ptr;\n \ttry\n \t{\n \t\tcv_ptr = cv_bridge::toCvCopy(img, 
sensor_msgs::image_encodings::BGR8);\n \t}\n \tcatch (cv_bridge::Exception& e)\n \t{\n \t\tROS_ERROR(\"cv_bridge exception: %s\", e.what());\n \t\treturn;\n \t}\n\t\n\t//Draw Lanes\n\tstd::vector<cv::KeyPoint> points;\t\n\tfor(int i=0;i<lanes->leftpoints.size();i++)\n\t{\n\t\tpoints.push_back(cv::KeyPoint(lanes->leftpoints.at(i).x,lanes->leftpoints.at(i).y,displayConfig_.lane_line_width));\n\t}\n\n\tfor(int i=0;i<lanes->rightpoints.size();i++)\n\t{\n\t\tpoints.push_back(cv::KeyPoint(lanes->rightpoints.at(i).x,lanes->rightpoints.at(i).y,displayConfig_.lane_line_width));\n\t}\n\n\tcv::drawKeypoints(cv_ptr->image,points,cv_ptr->image,cv::Scalar(displayConfig_.lane_b,displayConfig_.lane_g,displayConfig_.lane_r));\n\n\n\t//Draw Object boxes\n\tfor(int i=0;i<objects->objects.size();i++)\n\t{\n\t\tcv::rectangle(cv_ptr->image,cv::Rect(objects->objects.at(i).obj_rect.pointf.x,objects->objects.at(i).obj_rect.pointf.y,objects->objects.at(i).obj_rect.width,objects->objects.at(i).obj_rect.height),cv::Scalar(displayConfig_.ssd_b,displayConfig_.ssd_g,displayConfig_.ssd_r),displayConfig_.ssd_line_width,1,0);\n\n\t\tif(displayConfig_.enable_show_label)\n\t\t{\n\t\t\tchar* label = NULL;\n\t\t\tswitch(objects->objects.at(i).classification)\n\t\t\t{\n\t\t\t\tcase 7:\n\t\t\t\t\tlabel = \"car\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase 15:\n\t\t\t\t\tlabel = \"person\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase 6:\n\t\t\t\t\tlabel = \"bus\";\n\t\t\t\t\tbreak;\n\t\t\t\tdefault:\n\t\t\t\t\tbreak;\n\t\t\t\t\t\n\t\t\t}\n\t\t\t//cv::putText(cv_ptr->image,std::string(label), cv::Point(0,0),1.0,cv::FONT_HERSHEY_PLAIN, cv::Scalar(displayConfig_.ssd_r,displayConfig_.ssd_g,displayConfig_.ssd_b));\n\t\t}\n\n\t}\n\n\n\tpub_warpper_image_.publish(cv_ptr->toImageMsg());\n}\n\n\nvoid Display::onImageCb(const sensor_msgs::Image::ConstPtr& img)\n{\n\tpub_warpper_image_.publish(img);\n}\n\n\n\n\n\n\nvoid Display::configCallback(display::DisplayConfig &config, uint32_t level)\n{\n\tROS_INFO(\"Dynamic request. 
SSD cv::Scalar(%d,%d, %d), Lane cv::Scalar(%d, %d, %d), lane width: %f\",config.ssd_r,config.ssd_g,config.ssd_b,config.lane_r,config.lane_g,config.lane_b,config.lane_line_width);\n\tdisplayConfig_.enable_show_ssd = config.enable_show_ssd;\n\tdisplayConfig_.ssd_r= config.ssd_r;\n\tdisplayConfig_.ssd_g = config.ssd_g;\n\tdisplayConfig_.ssd_b = config.ssd_b;\n\tdisplayConfig_.ssd_line_width = config.ssd_line_width;\n\tdisplayConfig_.enable_show_label = config.enable_show_label;\t\n\n\tdisplayConfig_.enable_show_lane = config.enable_show_lane;\n\tdisplayConfig_.lane_r = config.lane_r;\n\tdisplayConfig_.lane_g= config.lane_g;\n\tdisplayConfig_.lane_b= config.lane_b;\n\tdisplayConfig_.lane_line_width= config.lane_line_width;\n\n\n\tif(displayConfig_.enable_show_ssd && displayConfig_.enable_show_lane)\n\t{\n\t\tif(!sub_image_raw_.getSubscriber())\n\t\t\tsub_image_raw_.subscribe();\n\n\t\tif(!sub_SSD_result_.getSubscriber())\n\t\t\tsub_SSD_result_.subscribe();\n\n\t\tif(!sub_lane_detect_result_.getSubscriber())\n\t\t\tsub_lane_detect_result_.subscribe();\n\n \t\tsync_all_ = new message_filters::Synchronizer<DisplayAllSyncPolicy>(DisplayAllSyncPolicy(10),sub_image_raw_, sub_lane_detect_result_, sub_SSD_result_);\n \t\tsync_all_->registerCallback(boost::bind(&Display::onLaneSSDDisplayCb,this,_1, _2,_3));\n\t}\n\telse if(displayConfig_.enable_show_ssd && !displayConfig_.enable_show_lane)\n\t{\t\n\t\tif(sub_lane_detect_result_.getSubscriber())\n\t\t{\n\t\t\tsub_lane_detect_result_.unsubscribe();\n\t\t}\n\n\t\tif(!sub_SSD_result_.getSubscriber())\n\t\t\tsub_SSD_result_.subscribe();\n\n\t\tif(!sub_image_raw_.getSubscriber())\n\t\t\tsub_image_raw_.subscribe();\n\n\t\tsync_ssd_ = new message_filters::Synchronizer<DisplaySSDSyncPolicy>(DisplaySSDSyncPolicy(10),sub_image_raw_, sub_SSD_result_);\n\t\tsync_ssd_->registerCallback(boost::bind(&Display::onSSDDisplayCb,this,_1, _2));\n\t}\n\telse if(!displayConfig_.enable_show_ssd && 
displayConfig_.enable_show_lane)\n\t{\t\t\n\t\tif(sub_SSD_result_.getSubscriber())\n\t\t\tsub_SSD_result_.unsubscribe();\n\n\t\tif(!sub_image_raw_.getSubscriber())\n\t\t\tsub_image_raw_.subscribe();\n\n\t\tif(!sub_lane_detect_result_.getSubscriber())\n\t\t\tsub_lane_detect_result_.subscribe();\n\n\t\tsync_lane_ = new message_filters::Synchronizer<DisplayLaneSyncPolicy>(DisplayLaneSyncPolicy(10),sub_image_raw_, sub_lane_detect_result_);\n\t\tsync_lane_->registerCallback(boost::bind(&Display::onLaneDisplayCb,this,_1, _2));\n\t}\n\telse\n\t{\n\t\tif(sub_SSD_result_.getSubscriber())\n\t\t\tsub_SSD_result_.unsubscribe();\n\t\n\t\tif(sub_lane_detect_result_.getSubscriber())\n\t\t\tsub_lane_detect_result_.unsubscribe();\n\n\t\tif(!sub_image_raw_.getSubscriber())\n\t\t\tsub_image_raw_.subscribe();\n\n\t\tsub_image_raw_.registerCallback(boost::bind(&Display::onImageCb,this,_1));\n\t}\n}\n\n\n\nint main(int argc, char** argv)\n{\n\tros::init(argc, argv, \"ssd_lane_display\");\n\tros::NodeHandle nh, nh_private(\"~\");\n\n\tDisplay display(nh, nh_private);\n\n\tros::Rate rate(50);\n\twhile(ros::ok())\n\t{\n\t\trate.sleep();\n\t\tros::spinOnce();\n\t}\n\n\treturn 0;\n}\n\n"
},
{
"alpha_fraction": 0.7315789461135864,
"alphanum_fraction": 0.7320573925971985,
"avg_line_length": 29.72058868408203,
"blob_id": "39768757c99c27777dd9472503021013689fdb4f",
"content_id": "f75677312c57a068c65582ad820dd06378ec7654",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2142,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 68,
"path": "/athena/examples/LCM/Singlecar/planning/planning_lcm_msg.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#pragma once\n\n#include <lcm/lcm.h>\n#include <lcm/lcm-cpp.hpp>\n\n//OBU专用lcm消息列表\n#include \"obu_lcm/ins_info.hpp\"\n\n//dongfeng lcm\n#include \"obu_lcm/CAN_status.hpp\"\n#include \"obu_lcm/CAN_value.hpp\"\n#include \"obu_lcm/chassis_detail.hpp\"\n#include \"obu_lcm/vehicle_info.hpp\"\n\n//#include \"obu_lcm/accelerate_control_info.hpp\"\n//#include \"obu_lcm/accelerate_feedback_info.hpp\"\n//#include \"obu_lcm/brake_control_info.hpp\"\n//#include \"obu_lcm/brake_feedback_info.hpp\"\n//#include \"obu_lcm/gears_control_info.hpp\"\n//#include \"obu_lcm/gears_feedback_info.hpp\"\n//#include \"obu_lcm/steering_control_info.hpp\"\n//#include \"obu_lcm/steering_feedback_info.hpp\"\n//#include \"obu_lcm/bcm_control_info.hpp\"\n\n//#include \"obu_lcm/lateral_control_info.hpp\"\n#include \"obu_lcm/lateral_control_vui_info.hpp\"\n#include \"obu_lcm/nav_points.hpp\"\n#include \"obu_lcm/longitudinal_control_info.hpp\"\n\n//#include \"obu_lcm/point_t.hpp\"\n//#include \"obu_lcm/rect_t.hpp\"\n//#include \"obu_lcm/patch_grid.hpp\"\n//#include \"obu_lcm/patch_t.hpp\"\n\n//new msg\n#include \"nad_lcm/om_traffic_lights_report.hpp\"\n#include \"nad_lcm/section_m.hpp\"\n#include \"nad_lcm/line_xys.hpp\"\n#include \"nad_lcm/mo_change_lane_request.hpp\"\n#include \"nad_lcm/mo_info_report.hpp\" //5HZ向网络层上传车辆状态\n#include \"nad_lcm/om_info_report.hpp\"\n#include \"nad_lcm/point_m.hpp\"\n#include \"nad_lcm/point_xys.hpp\"\n#include \"nad_lcm/route_planning_m.hpp\"\n//#include \"nad_lcm/obu_command.hpp\"\n#include \"nad_lcm/om_change_lane_respond.hpp\"\n#include \"nad_lcm/om_route_respond.hpp\"\n\n#include \"nad_lcm/obstacle_info.hpp\"\n//#include \"nad_lcm/obstacle_list.hpp\"\n#include \"nad_lcm/sensor_obstacle_report.hpp\"\n#include \"nad_lcm/mo_obstacle_report.hpp\"\n\n\n\n//#include \"obu_lcm/obu_map_info.hpp\"\n#include \"nad_lcm/ou_start_auto_respond.hpp\"\n#include \"nad_lcm/ou_stop_auto_respond.hpp\"\n#include \"nad_lcm/ou_alarm_report.hpp\"\n\n#include 
\"nad_lcm/om_info_report.hpp\"\n#include \"nad_lcm/mo_change_lane_request.hpp\"\n#include \"nad_lcm/om_change_lane_respond.hpp\"\n\n#include \"obu_lcm/mt_info_report.hpp\"\n\n//#include \"nad_lcm/om_stop_request.hpp\"//停车位信息\n#include \"obu_lcm/back_coordinate_XYH.hpp\"//停车位信息\n\n"
},
{
"alpha_fraction": 0.6239495873451233,
"alphanum_fraction": 0.7331932783126831,
"avg_line_length": 16,
"blob_id": "799c54a904d8e2ae6cf4b2bd2570996718fd6173",
"content_id": "5d9b52bd1fc89324fd09ca950f8ad162edbba181",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 610,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 28,
"path": "/athena/examples/ROS/src/Perception/ssd_detection/config/rosCamera_config.ini",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#读取网络模型文件\nnetwork_Model =/home/oem/catkin_ws/src/ros_ssd_detection/config/deploy_508.prototxt \n#读取权重文件\npretrained_Weights = /home/oem/catkin_ws/src/ros_ssd_detection/config/VGG_VOC0712_SSD_300x300_iter_200508.caffemodel\n#横向主光点\nCX0 = 638\n#纵向主光点\nCY0 = 137\n#焦距\nFOCUS = 3232\n#相机横向位置\nPOSX = 0\n#相机高度\nPOSY = -1450\n#纵向位置\nPOSZ = 0;\n#俯仰角度值\nPITCH =-0.2\n#航向角度值\nYAW =0.014\n#垂直方向mm/pix \nm_per_pix_i=43.75\n#水平方向mm/pix\nm_per_pix_j=40.6\n#最低置信度阀值 \nmin_score_threshold=0.3\n#跟踪\nuse_track=false\n"
},
{
"alpha_fraction": 0.5327102541923523,
"alphanum_fraction": 0.5981308221817017,
"avg_line_length": 20.399999618530273,
"blob_id": "53af359e26a55e6869c9c2a80712a02d744baf9b",
"content_id": "e0b1c2bade1ef86612a28ff95f0f8da508f02672",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 214,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 10,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/brake/TRUCK_J6P/truck_j6p_deceleration_brake_map.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file cs55_deceleration_brake_map.h.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n double TruckJ6pGetBrakeVaule(double deceleration);\n"
},
{
"alpha_fraction": 0.4541691243648529,
"alphanum_fraction": 0.4801892340183258,
"avg_line_length": 18.43678092956543,
"blob_id": "9501066be99dc89f0a914e5cfc886e8167be954b",
"content_id": "6cd2d0a3ebac088440592425ad3d024c6003db79",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1817,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 87,
"path": "/athena/core/x86/Control/include/localization_.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file localization.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef LOCALIZATION__H_\n#define LOCALIZATION__H_\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class Localization\n *\n * @brief Location information.\n */\nclass Localization_\n{\n public:\n Localization_()\n {\n gps_time_ = 0.0;\n week_ = 0;\n lat_ = 0.0;\n lon_ = 0.0;\n height_ = 0.0;\n lateral_speed_ = 0.0;\n longitudinal_speed_ = 0.0;\n down_speed_ = 0.0;\n roll_ = 0.0;\n pitch_ = 0.0;\n heading_ = 0.0;\n lateral_accelerate_ = 0.0;\n longitudinal_accelerate_ = 0.0;\n down_accelerate_ = 0.0;\n roll_speed_ = 0.0;\n pitch_speed_ = 0.0;\n heading_speed_ = 0.0;\n }\n ~Localization_() = default;\n ///GPS时间\n double gps_time_;\n ///周\n int week_;\n ///经度\n double lat_;\n ///纬度\n double lon_;\n ///海拔\n double height_;\n ///横向速度\n double lateral_speed_;\n ///纵向速度\n double longitudinal_speed_;\n ///地向速度\n double down_speed_;\n ///横滚角度\n double roll_;\n ///俯仰角度\n double pitch_;\n ///航向角度\n double heading_;\n ///横向加速度\n double lateral_accelerate_;\n ///纵向加速度\n double longitudinal_accelerate_;\n ///地向加速度\n double down_accelerate_;\n ///横滚角速度\n double roll_speed_;\n ///俯仰角速度\n double pitch_speed_;\n ///航向角速度\n double heading_speed_;\n};\n}\n}\n\n#endif // LOCALIZATION_H_\n"
},
{
"alpha_fraction": 0.7051854133605957,
"alphanum_fraction": 0.7094780206680298,
"avg_line_length": 23.669490814208984,
"blob_id": "145626c842579a2c71d21e0b33c37eecf47b59be",
"content_id": "5c9d960748470684f6ebb55b48a6d86f207d9fd9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5824,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 236,
"path": "/athena/examples/ROS/src/Perception/lane_detect/src/lane_detect_node.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <ros/ros.h>\n#include <stdio.h>\n#include <vector>\n#include <dlfcn.h>\n#include <stdlib.h>\n#include <iostream>\n#include \"lane_utils.h\"\n#include <algorithm>\n\n#include <lane_detect/PixelPoints.h>\n\n//CV_BRIDGE\n#include <image_transport/image_transport.h>\n#include <cv_bridge/cv_bridge.h>\n#include <sensor_msgs/image_encodings.h>\n\n//ROS Service\n#include <lane_detect/LaneDetector.h>\n\n//\n#include <lane_detect/LaneDeectResult.h>\n\n//OpenCV\n#include <opencv2/imgproc/imgproc.hpp>\n#include <opencv2/highgui/highgui.hpp>\n#include <opencv2/opencv.hpp> \n\n\n//Boost\n#include <boost/thread.hpp> \n#include <boost/bind.hpp> \n\n\n//#define DISPLAY_LANE_WITH_OPENCV 0\n\nclass LaneDecter{\npublic:\n\tLaneDecter(const ros::NodeHandle& nh, const ros::NodeHandle& nh_private);\n\t~LaneDecter();\nprotected:\n\tvoid init(const char* config_file);\n\n\tvoid onImageCallback(const sensor_msgs::Image::ConstPtr& msg);\n\n\n\tbool LaneDetectorServer(lane_detect::LaneDetector::Request &req,\n lane_detect::LaneDetector::Response &res);\n\nprivate:\n\tros::NodeHandle nh_;\n\tros::NodeHandle nh_private_;\n\n\timage_transport::ImageTransport it_;\n \timage_transport::Subscriber sub_image_;\n \timage_transport::Publisher pub_image_;\n\t\n\tros::Publisher pub_lane_detection_result_,pub_lane_detection_pixel_result_;\t\n\n\n\tros::ServiceServer lane_detect_service_;\n\n\tstd::string config_file_=\"\";\n\tstd::string frame_id_=\"\";\n\n\tcv::Mat lane_detect_result_;\n\n\tlane_detect::LaneDeectResult laneDetectionResult_;\n\t//cv_bridge::CvImagePtr cv_ptr_in_;\n\t//cv_bridge::CvImagePtr cv_ptr_out_;\n\n#ifdef SAVE_IMAGE\n\tint image_count;\n#endif\n\n};\n\nLaneDecter::LaneDecter(const ros::NodeHandle& nh, const ros::NodeHandle& nh_private)\n\t:nh_(nh)\n\t,nh_private_(nh_private)\n\t,it_(nh_)\n#ifdef SAVE_IMAGE\n\t,image_count(0)\n#endif\n{\n\t//Param\n\tnh_private_.param<std::string>(\"config_file\", config_file_, 
\"line_config.ini\");\n\tnh_private_.param<std::string>(\"frame_id\", frame_id_, \"usb_camera\");\n\n\t//Init\n\tinit(config_file_.c_str());\n\n\t// Subscrive to input video feed and publish output video feed\n \tsub_image_ = it_.subscribe(\"/usb_cam/image_raw\", 1,&LaneDecter::onImageCallback, this);\n \tpub_image_ = it_.advertise(\"/usb_cam/image_lane_detect\", 1);\n\n\tpub_lane_detection_result_ = nh_.advertise<lane_detect::LaneDeectResult>(\"laneDetectQuality\",1);\n\tpub_lane_detection_pixel_result_ = nh_.advertise<lane_detect::PixelPoints>(\"laneDetectPixelPoints\",1);\n\t//Service\n\tlane_detect_service_ = nh_.advertiseService(\"laneDetect\", &LaneDecter::LaneDetectorServer,this);\n\n}\n\nLaneDecter::~LaneDecter(){}\n\n\nvoid LaneDecter::init(const char* config_file)\n{\n\t::init(config_file);\n}\n\n\nvoid LaneDecter::onImageCallback(const sensor_msgs::Image::ConstPtr& msg)\n{\n\n\tcv_bridge::CvImagePtr cv_ptr;\n \ttry\n \t{\n \t\tcv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);\n \t}\n \tcatch (cv_bridge::Exception& e)\n \t{\n \t\tROS_ERROR(\"cv_bridge exception: %s\", e.what());\n \t\treturn;\n \t}\n\n\tsetImage(cv_ptr->image);\n\tlane_detect_result_ = getResultImage();\n\n\n\t//tranform cv::Mat to sensor_msgs/Image through cv_bridge\n\tstd_msgs::Header header;\n\theader.stamp = msg->header.stamp;\n\theader.frame_id = frame_id_;\n\tstd::string encoding(\"bgr8\");\n\tcv_bridge::CvImagePtr cv_ptr_result = cv_bridge::CvImagePtr(new cv_bridge::CvImage(header,encoding,lane_detect_result_));\n\n\tpub_image_.publish(cv_ptr_result->toImageMsg());\n\n\t//lane detect quality\n\tint lane_quality;\n std::vector<point> leftPoints, rightPoints, leftImagePoints, rightImagePoints;\n getLaneInfos(&lane_quality, &leftPoints, &rightPoints);\n\tgetLaneImageInfos(&leftImagePoints, &rightImagePoints);\n\n#ifdef DISPLAY_LANE_WITH_OPENCV\n\tstd::vector<cv::KeyPoint> points;\t\n\tfor(int 
i=0;i<leftImagePoints.size();i++)\n\t{\n\t\tpoints.push_back(cv::KeyPoint(leftImagePoints.at(i).x,leftImagePoints.at(i).y,2.0));\n\t}\n\tcv::drawKeypoints(cv_ptr->image,points,cv_ptr->image,cv::Scalar(0,255,0));\n\tcv::imshow(\"lane points\",cv_ptr->image);\n\tcv::waitKey(1);\n#endif //DISPLAY_LANE_WITH_OPENCV\n\n\t//Publish lane detection pixel point\n\tlane_detect::PixelPoints pixelPoints;\n\tgeometry_msgs::Point32 pixelPoint;\n\tfor(int i=0;i<leftImagePoints.size();i++)\n\t{\n\n\t\tpixelPoint.x = leftImagePoints.at(i).x;\n\t\tpixelPoint.y = leftImagePoints.at(i).y;\n\n\t\tpixelPoints.leftpoints.push_back(pixelPoint);\n\n\t}\n\tfor(int i=0;i<rightImagePoints.size();i++)\n\t{\n\t\tpixelPoint.x = rightImagePoints.at(i).x;\n\t\tpixelPoint.y = rightImagePoints.at(i).y;\n\n\t\tpixelPoints.rightpoints.push_back(pixelPoint);\n\t}\n\n\n\tpixelPoints.header.stamp = msg->header.stamp;\n\tpixelPoints.header.frame_id = \"lane_pixel\";\n\tpub_lane_detection_pixel_result_.publish(pixelPoints);\n\n\n\n\t//Publish lane detection result\n\tlaneDetectionResult_.header.stamp = msg->header.stamp;\n\tlaneDetectionResult_.header.frame_id = frame_id_;\n\tlaneDetectionResult_.quality = \tlane_quality;\n\tfor(auto iter=leftPoints.begin(); iter != leftPoints.end(); iter++)\n\t{ \n\t\tlane_detect::LanePoint lanePoint;\n\t\tlanePoint.x = iter->x;\n\t\tlanePoint.y = iter->y;\n\t\tlanePoint.road_x = iter->road_x;\n\t\tlanePoint.road_y = iter->road_y;\n\t\t\n\t\tlaneDetectionResult_.left_points.push_back(lanePoint);\n\t}\n\n\tfor(auto iter=rightPoints.begin(); iter != rightPoints.end(); iter++)\n\t{\n\t\tlane_detect::LanePoint lanePoint;\n\t\tlanePoint.x = iter->x;\n\t\tlanePoint.y = iter->y;\n\t\tlanePoint.road_x = iter->road_x;\n\t\tlanePoint.road_y = iter->road_y;\n\t\t\n\t\tlaneDetectionResult_.right_points.push_back(lanePoint);\n\t}\n\n\tpub_lane_detection_result_.publish(laneDetectionResult_);\n}\n\n\nbool LaneDecter::LaneDetectorServer(lane_detect::LaneDetector::Request &req,\n 
lane_detect::LaneDetector::Response &res)\n{\n\treturn true;\n}\n\n\n\n\n\n\nint main(int argc, char** argv)\n{\n\tros::init(argc, argv, \"lane_detector_node\");\n\n\tros::NodeHandle nh;\n\tros::NodeHandle nh_private(\"~\");\n\n\tLaneDecter laneDetector(nh,nh_private);\n\n\tros::spin();\n\t\n\treturn 0;\n}\n\n\n"
},
{
"alpha_fraction": 0.5629110336303711,
"alphanum_fraction": 0.5716791152954102,
"avg_line_length": 25.523256301879883,
"blob_id": "64e9cf7a058387d1d4f9a7709144d08da94b9ad7",
"content_id": "351a18ced74899c7d8bd70ff742f8a7ebdc96290",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2417,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 86,
"path": "/athena/core/arm/Planning/include/map_matching/map_matching.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <math.h>\n\n//#include \"cs.h\"\n#include \"common/path.h\"\n#include \"common/car_state.h\"\n#include \"planning/route_data.h\"\n\nclass map_matching\n{\nprivate:\n // 最近的匹配点中的相关信息。\n int last_match_point_no;\n int next_match_point_no;\n int current_match_point_no;\n\n bool b_map_update = false;\n //int last_path_num;\n\npublic:\n map_matching();\n ~map_matching();\n\n void init();\n\n // 地图匹配算法\n int MapMarch_Min_Distance_motion_planning(double Current_X,\n double Current_Y,\n double Current_heading, //当前的头指向\n path& p,\n int length,\n double& min_error,\n double length_s);\n\n ///zp20171026:点匹配边界线\n int MapMarch_Min_Distance_motion_planning(double Current_X,\n double Current_Y,\n line_xys& edge_line,\n int st_pos,\n int en_pos,\n double& min_error,\n double length_s);\n\n ///zp20171109:点匹配center_insert\n int MapMarch_Min_Distance_motion_planning(double Current_X,\n double Current_Y,\n path& center_insert,\n int st_pos,\n int en_pos,\n double& min_error,\n double length_s);\n\n //****min_error:点离轨迹点最小距离; min_length:考虑方向后点代价值\n int MapMarch_Min_Distance_motion_planning(\n CarState car_state,\n path& p,\n bool is_map_update,\n double length_s, //length_s: 局部搜索长度一般赋值 15m\n int num_before,\n double& min_error,\n int cur_pos ); //min_error: 搜索峰值限定 10m\n\n int MapMarch_Min_Distance_mapping(double Current_X,\n double Current_Y,\n path& p, int length,\n double& min_error);\n\n\n int find_next_moition_planning_points(path p, double length);\n int find_next_moition_planning_points(path p, int start_pos, double length);\n\n int find_moition_planning_before_points(path p, double length);\n\n int find_moition_planning_start_points(path p, double length);\n\n int get_current_match_point_no();\n\n void set_current_match_point_no(int current_no);\n\n int get_last_match_point_no();\n\n void set_last_match_point_no(int last_no);\n\n int set_b_map_update();\n};\n"
},
{
"alpha_fraction": 0.5943295359611511,
"alphanum_fraction": 0.6030944585800171,
"avg_line_length": 42.412994384765625,
"blob_id": "eaed515c67798d2f36a8dfedff852cf400c7165b",
"content_id": "a9c9f7657e4cb721e7328e415a3f0b7cdab280c2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 37482,
"license_type": "no_license",
"max_line_length": 233,
"num_lines": 862,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/msg/nad_msg.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_msg.cpp\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:引用了msg目录下的所有头文件\n-------------------------------------------------------*/\n\n\n#include \"nad_msg.h\"\n\nstring log_rc_rsu_login_request( const nad_lcm::ne_msg_t<nad_lcm::rc_rsu_login_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_rsu_login_request(%s->%s): rsu=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu_name.c_str());\n return string(ret);\n}\n\nstring log_cr_rsu_login_respond( const nad_lcm::ne_msg_t<nad_lcm::cr_rsu_login_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_rsu_login_respond(%s->%s): rsu=%s, ret=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu_name.c_str(), ret_str(msg->body.retcode));\n return string(ret);\n}\n\nstring log_cr_rsu_logout_notify( const nad_lcm::ne_msg_t<nad_lcm::cr_rsu_logout_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_rsu_logout_notify(%s->%s): rsu=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu_name.c_str());\n return string(ret);\n}\n\nstring log_oc_rsu_name_request( const nad_lcm::ne_msg_t<nad_lcm::oc_rsu_name_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"oc_rsu_name_request(%s->%s): obu=%s, lon=%.6f, lat=%.6f\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), msg->body.obu_lon, msg->body.obu_lat);\n return string(ret);\n}\n\nstring log_co_rsu_name_respond( const nad_lcm::ne_msg_t<nad_lcm::co_rsu_name_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"co_rsu_name_respond(%s->%s): rsu=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu_name.c_str());\n return string(ret);\n}\n\nstring log_or_obu_login_request( const nad_lcm::ne_msg_t<nad_lcm::or_obu_login_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"or_obu_login_request(%s->%s): 
rsu=%s, obu=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu_name.c_str(), msg->body.obu_name.c_str());\n return string(ret);\n}\n\nstring log_rc_obu_login_request( const nad_lcm::ne_msg_t<nad_lcm::rc_obu_login_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_obu_login_request(%s->%s): rsu=%s, obu=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu_name.c_str(), msg->body.obu_name.c_str());\n return string(ret);\n}\n\nstring log_cr_obu_login_respond( const nad_lcm::ne_msg_t<nad_lcm::cr_obu_login_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_obu_login_respond(%s->%s): rsu=%s, obu=%s, ret=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu_name.c_str(), msg->body.obu_name.c_str(), ret_str(msg->body.retcode));\n return string(ret);\n}\n\nstring log_ro_obu_login_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_obu_login_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_obu_login_respond(%s->%s): rsu=%s, obu=%s, ret=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu_name.c_str(), msg->body.obu_name.c_str(), ret_str(msg->body.retcode));\n return string(ret);\n}\n\nstring log_cr_obu_logout_notify( const nad_lcm::ne_msg_t<nad_lcm::cr_obu_logout_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_obu_logout_notify(%s->%s): obu=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str());\n return string(ret);\n}\n\nstring log_rc_obu_logout_notify( const nad_lcm::ne_msg_t<nad_lcm::rc_obu_logout_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_obu_logout_notify(%s->%s): obu=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str());\n return string(ret);\n}\n\nstring log_ro_obu_logout_notify( const nad_lcm::ne_msg_t<nad_lcm::ro_obu_logout_notify > *msg)\n{\n char ret[256];\n sprintf(ret, 
\"ro_obu_logout_notify(%s->%s): obu=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str());\n return string(ret);\n}\n\nstring log_uo_route_request( const nad_lcm::uo_route_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uo_route_request: obu=%s, kp=%d, dest=%s\",\n msg->obu_name.c_str(), msg->num_of_kp, msg->destination.c_str());\n return string(ret);\n}\n\nstring log_or_route_request( const nad_lcm::ne_msg_t<nad_lcm::or_route_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"or_route_request(%s->%s): obu=%s, kp=%d, dest=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), msg->body.num_of_kp, msg->body.destination.c_str(), route_reason_str(msg->body.route_reason));\n return string(ret);\n}\n\nstring log_rc_route_request( const nad_lcm::ne_msg_t<nad_lcm::rc_route_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_route_request(%s->%s): obu=%s, kp=%d, dest=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), msg->body.num_of_kp, msg->body.destination.c_str(), route_reason_str(msg->body.route_reason));\n return string(ret);\n}\n\nstring log_cr_route_respond( const nad_lcm::ne_msg_t<nad_lcm::cr_route_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_route_respond(%s->%s): obu=%s, ret=%s, kp=%d, lane=%d, time=%ld, dest=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), ret_str(msg->body.retcode), msg->body.route.num_of_kp, msg->body.route.num_of_lane, msg->body.route.time_stamp, msg->body.route.destination.c_str(), route_reason_str(msg->body.route.route_reason));\n return string(ret);\n}\n\nstring log_ro_route_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_route_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_route_respond(%s->%s): obu=%s, ret=%s, kp=%d, lane=%d, time=%ld, dest=%s, reason=%s\",\n 
msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), ret_str(msg->body.retcode), msg->body.route.num_of_kp, msg->body.route.num_of_lane, msg->body.route.time_stamp, msg->body.route.destination.c_str(), route_reason_str(msg->body.route.route_reason));\n return string(ret);\n}\n\nstring log_rc_route_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_route_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_route_respond(%s->%s): obu=%s, ret=%s, kp=%d, lane=%d, time=%ld, dest=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), ret_str(msg->body.retcode), msg->body.route.num_of_kp, msg->body.route.num_of_lane, msg->body.route.time_stamp, msg->body.route.destination.c_str(), route_reason_str(msg->body.route.route_reason));\n return string(ret);\n}\n\nstring log_ou_route_respond( const nad_lcm::ou_route_respond *msg)\n{\n char ret[256];\n sprintf(ret, \"ou_route_respond: obu=%s, ret=%s, kp=%d, lane=%d, time=%ld, dest=%s, reason=%s\",\n msg->obu_name.c_str(), ret_str(msg->retcode), msg->route.num_of_kp, msg->route.num_of_lane, msg->route.time_stamp, msg->route.destination.c_str(), route_reason_str(msg->route.route_reason));\n return string(ret);\n}\n\nstring log_uo_start_auto_request( const nad_lcm::uo_start_auto_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uo_start_auto_request: obu=%s, time=%ld\",\n msg->obu_name.c_str(), msg->time_stamp);\n return string(ret);\n}\n\nstring log_or_start_auto_request( const nad_lcm::ne_msg_t<nad_lcm::or_start_auto_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"or_start_auto_request(%s->%s): obu=%s, time=%ld, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), msg->body.time_stamp, start_reason_str(msg->body.start_reason));\n return string(ret);\n}\n\nstring log_rc_start_auto_request( const nad_lcm::ne_msg_t<nad_lcm::rc_start_auto_request > *msg)\n{\n char 
ret[256];\n sprintf(ret, \"rc_start_auto_request(%s->%s): obu=%s, time=%ld, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), msg->body.time_stamp, start_reason_str(msg->body.start_reason));\n return string(ret);\n}\n\nstring log_cr_start_auto_respond( const nad_lcm::ne_msg_t<nad_lcm::cr_start_auto_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_start_auto_respond(%s->%s): obu=%s, ret=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), ret_str(msg->body.retcode), start_reason_str(msg->body.start_reason));\n return string(ret);\n}\n\nstring log_ro_start_auto_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_start_auto_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_start_auto_respond(%s->%s): obu=%s, ret=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), ret_str(msg->body.retcode), start_reason_str(msg->body.start_reason));\n return string(ret);\n}\n\nstring log_ou_start_auto_respond( const nad_lcm::ou_start_auto_respond *msg)\n{\n char ret[256];\n sprintf(ret, \"ou_start_auto_respond: obu=%s, ret=%s, reason=%s\",\n msg->obu_name.c_str(), ret_str(msg->retcode), start_reason_str(msg->start_reason));\n return string(ret);\n}\n\nstring log_uo_stop_auto_request( const nad_lcm::uo_stop_auto_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uo_stop_auto_request: obu=%s\",\n msg->obu_name.c_str());\n return string(ret);\n}\n\nstring log_or_stop_auto_notify( const nad_lcm::ne_msg_t<nad_lcm::or_stop_auto_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"or_stop_auto_notify(%s->%s): obu=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), stop_reason_str(msg->body.stop_reason));\n return string(ret);\n}\n\nstring log_rc_stop_auto_notify( const nad_lcm::ne_msg_t<nad_lcm::rc_stop_auto_notify > *msg)\n{\n 
char ret[256];\n sprintf(ret, \"rc_stop_auto_notify(%s->%s): obu=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), stop_reason_str(msg->body.stop_reason));\n return string(ret);\n}\n\nstring log_cu_stop_auto_notify( const nad_lcm::ne_msg_t<nad_lcm::cu_stop_auto_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_stop_auto_notify(%s->%s): obu=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), stop_reason_str(msg->body.stop_reason));\n return string(ret);\n}\n\nstring log_ro_stop_auto_respond( const nad_lcm::ne_msg_t<nad_lcm::ro_stop_auto_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_stop_auto_respond(%s->%s): obu=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), stop_reason_str(msg->body.stop_reason));\n return string(ret);\n}\n\nstring log_ou_stop_auto_respond( const nad_lcm::ou_stop_auto_respond *msg)\n{\n char ret[256];\n sprintf(ret, \"ou_stop_auto_respond: obu=%s, ret=%s, reason=%s\",\n msg->obu_name.c_str(), ret_str(msg->retcode), stop_reason_str(msg->stop_reason));\n return string(ret);\n}\n\nstring log_dr_info_report( const nad_lcm::ne_msg_t<nad_lcm::dr_info_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"dr_info_report(%s->%s): sensor=%d, obstacle=%d\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.num_of_sensor, msg->body.num_of_obstacle);\n return string(ret);\n}\n\nstring log_or_info_report( const nad_lcm::ne_msg_t<nad_lcm::or_info_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"or_info_report(%s->%s): obu=%s, obstacle=%d\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu.obu_name.c_str(), msg->body.num_of_obstacle);\n return string(ret);\n}\n\nstring log_rc_info_report( const nad_lcm::ne_msg_t<nad_lcm::rc_info_report > *msg)\n{\n char ret[256];\n sprintf(ret, 
\"rc_info_report(%s->%s): rsu=%s, light=%d, limspeed=%d, block=%d, platoon=%d\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.rsu.rsu_name.c_str(), msg->body.num_of_light, msg->body.num_of_limspeed, msg->body.num_of_block, msg->body.num_of_platoon);\n return string(ret);\n}\n\nstring log_cu_info_report( const nad_lcm::ne_msg_t<nad_lcm::cu_info_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_info_report(%s->%s): rsu=%d, light=%d, limspeed=%d, block=%d, platoon=%d, task=%d\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.num_of_rsu, msg->body.num_of_light, msg->body.num_of_limspeed, msg->body.num_of_block, msg->body.num_of_platoon, msg->body.num_of_task);\n return string(ret);\n}\n\nstring log_cr_info_report( const nad_lcm::ne_msg_t<nad_lcm::cr_info_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_info_report(%s->%s): ret=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n ret_str(msg->body.retcode));\n return string(ret);\n}\n\nstring log_mo_change_lane_request( const nad_lcm::mo_change_lane_request *msg)\n{\n char ret[256];\n sprintf(ret, \"mo_change_lane_request: dir=%s, start=%d, end=%d, reason=%s, status=%s\",\n cl_direction_str(msg->direction), msg->starting_lane, msg->ending_lane, cl_reason_str(msg->reason), cl_status_str(msg->status));\n return string(ret);\n}\n\nstring log_or_change_lane_request( const nad_lcm::ne_msg_t<nad_lcm::or_change_lane_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"or_change_lane_request(%s->%s): obu=%s, dir=%s, start=%d, end=%d, reason=%s, status=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), cl_direction_str(msg->body.direction), msg->body.starting_lane, msg->body.ending_lane, cl_reason_str(msg->body.reason), cl_status_str(msg->body.status));\n return string(ret);\n}\n\nstring log_ro_change_lane_respond( const 
nad_lcm::ne_msg_t<nad_lcm::ro_change_lane_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_change_lane_respond(%s->%s): obu=%s, ret=%s, dir=%s, start=%d, end=%d, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), ret_str(msg->body.retcode), cl_direction_str(msg->body.direction), msg->body.starting_lane, msg->body.ending_lane, cl_reason_str(msg->body.reason));\n return string(ret);\n}\n\nstring log_om_change_lane_respond( const nad_lcm::om_change_lane_respond *msg)\n{\n char ret[256];\n sprintf(ret, \"om_change_lane_respond: ret=%s, dir=%s, end=%d, reason=%s\",\n ret_str(msg->retcode), cl_direction_str(msg->direction), msg->ending_lane, cl_reason_str(msg->reason));\n return string(ret);\n}\n\nstring log_uc_add_platoon_request( const nad_lcm::uc_add_platoon_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_add_platoon_request: platoon=%d, speed=%.1fkm/h, dest=%s\",\n msg->num_of_platoon, msg->speed, msg->destination.c_str());\n return string(ret);\n}\n\nstring log_cr_add_platoon_request( const nad_lcm::ne_msg_t<nad_lcm::cr_add_platoon_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_add_platoon_request(%s->%s): platoon=%d, speed=%.1fkm/h, dest=%s, kp=%d, lane=%d, time=%ld\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.num_of_platoon, msg->body.speed, msg->body.destination.c_str(), msg->body.route.num_of_kp, msg->body.route.num_of_lane, msg->body.route.time_stamp);\n return string(ret);\n}\n\nstring log_ro_add_platoon_notify( const nad_lcm::ne_msg_t<nad_lcm::ro_add_platoon_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_add_platoon_notify(%s->%s): platoon=%d, speed=%.1fkm/h, dest=%s, kp=%d, lane=%d, time=%ld\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.num_of_platoon, msg->body.speed, msg->body.destination.c_str(), msg->body.route.num_of_kp, msg->body.route.num_of_lane, msg->body.route.time_stamp);\n 
return string(ret);\n}\n\nstring log_ou_add_platoon_notify( const nad_lcm::ou_add_platoon_notify *msg)\n{\n char ret[256];\n sprintf(ret, \"ou_add_platoon_notify: platoon=%d, speed=%.1fkm/h, dest=%s, kp=%d, lane=%d, time=%ld\",\n msg->num_of_platoon, msg->speed, msg->destination.c_str(), msg->route.num_of_kp, msg->route.num_of_lane, msg->route.time_stamp);\n return string(ret);\n}\n\nstring log_rc_add_platoon_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_add_platoon_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"ou_add_platoon_notify(%s->%s): speed=%.1fkm/h, dest=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.speed, msg->body.destination.c_str(), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_cu_add_platoon_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_add_platoon_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_add_platoon_respond(%s->%s): speed=%.1fkm/h, dest=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.speed, msg->body.destination.c_str(), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_uc_set_platoon_request( const nad_lcm::uc_set_platoon_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_set_platoon_request: type=%s, obu=%s, speed=%.1fkm/h\",\n sp_str(msg->type), msg->obu_name.c_str(), msg->speed);\n return string(ret);\n}\n\nstring log_cr_set_platoon_request( const nad_lcm::ne_msg_t<nad_lcm::cr_set_platoon_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_set_platoon_request(%s->%s): type=%s, obu=%s, speed=%.1fkm/h\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n sp_str(msg->body.type), msg->body.obu_name.c_str(), msg->body.speed);\n return string(ret);\n}\n\nstring log_ro_set_platoon_notify( const nad_lcm::ne_msg_t<nad_lcm::ro_set_platoon_notify > *msg)\n{\n char ret[256];\n sprintf(ret, 
\"ro_set_platoon_notify(%s->%s): type=%s, obu=%s, speed=%.1fkm/h\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n sp_str(msg->body.type), msg->body.obu_name.c_str(), msg->body.speed);\n return string(ret);\n}\n\nstring log_ou_set_platoon_notify( const nad_lcm::ou_set_platoon_notify *msg)\n{\n char ret[256];\n sprintf(ret, \"ou_set_platoon_notify: type=%s, obu=%s, speed=%.1fkm/h\",\n sp_str(msg->type), msg->obu_name.c_str(), msg->speed);\n return string(ret);\n}\n\nstring log_rc_set_platoon_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_set_platoon_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_set_platoon_notify(%s->%s): type=%s, obu=%s, speed=%.1fkm/h, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n sp_str(msg->body.type), msg->body.obu_name.c_str(), msg->body.speed, ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_cu_set_platoon_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_set_platoon_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_set_platoon_respond(%s->%s): type=%s, obu=%s, speed=%.1fkm/h, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n sp_str(msg->body.type), msg->body.obu_name.c_str(), msg->body.speed, ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_uc_delete_platoon_request(const nad_lcm::uc_delete_platoon_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_delete_platoon_request: reason=%s\",\n dpr_str(msg->reason));\n return string(ret);\n}\n\nstring log_cr_delete_platoon_request(const nad_lcm::ne_msg_t<nad_lcm::cr_delete_platoon_request> *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_delete_platoon_request(%s->%s): reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n dpr_str(msg->body.reason));\n return string(ret);\n}\n\nstring log_rc_delete_platoon_notify( const 
nad_lcm::ne_msg_t<nad_lcm::rc_delete_platoon_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_delete_platoon_notify(%s->%s): reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n dpr_str(msg->body.reason));\n return string(ret);\n}\n\nstring log_cu_delete_platoon_notify( const nad_lcm::ne_msg_t<nad_lcm::cu_delete_platoon_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_delete_platoon_notify(%s->%s): reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n dpr_str(msg->body.reason));\n return string(ret);\n}\n\nstring log_ro_delete_platoon_notify( const nad_lcm::ne_msg_t<nad_lcm::ro_delete_platoon_notify > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_delete_platoon_notify(%s->%s): reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n dpr_str(msg->body.reason));\n return string(ret);\n}\n\nstring log_ou_delete_platoon_notify( const nad_lcm::ou_delete_platoon_notify *msg)\n{\n char ret[256];\n sprintf(ret, \"ou_delete_platoon_notify: reason=%s\",\n dpr_str(msg->reason));\n return string(ret);\n}\n\nstring log_uc_oct_login_request( const nad_lcm::uc_oct_login_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_oct_login_request: user=%s, pwd=%s\",\n msg->csu_user.c_str(), msg->csu_password.c_str());\n return string(ret);\n}\n\nstring log_cu_oct_login_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_oct_login_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_oct_login_respond(%s->%s): csu=%s, ret=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.csu_name.c_str(), ret_str(msg->body.retcode));\n return string(ret);\n}\n\nstring log_uc_config_request( const nad_lcm::uc_config_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_config_request: csu=%s\",\n msg->csu_name.c_str());\n return string(ret);\n}\n\nstring log_cu_config_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_config_respond > *msg)\n{\n char ret[256];\n 
sprintf(ret, \"cu_config_respond(%s->%s): csu=%s, obu=%d, rsu=%d, rsd=%d, task=%d\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.csu_name.c_str(), msg->body.num_of_obu, msg->body.num_of_rsu, msg->body.num_of_rsd, msg->body.num_of_task);\n return string(ret);\n}\n\nstring log_uc_exec_task_request( const nad_lcm::uc_exec_task_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_exec_task_request: task=%s\",\n msg->task_name.c_str());\n return string(ret);\n}\n\nstring log_cu_exec_task_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_exec_task_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_exec_task_respond(%s->%s): task=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.task_name.c_str(), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_uc_stop_task_request( const nad_lcm::uc_stop_task_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_stop_task_request: task=%s\",\n msg->task_name.c_str());\n return string(ret);\n}\n\nstring log_cu_stop_task_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_stop_task_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_stop_task_respond(%s->%s): task=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.task_name.c_str(), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_cr_exec_task_func_request(const nad_lcm::ne_msg_t<nad_lcm::cr_exec_task_func_request> *msg)\n{\n char ret[2048];\n sprintf(ret, \"cr_exec_task_func_request(%s->%s): task=%s, seq=%d, func=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.task_name.c_str(), msg->body.seq_no, msg->body.task_func.c_str());\n return string(ret);\n}\n\nstring log_rc_exec_task_func_respond(const nad_lcm::ne_msg_t<nad_lcm::rc_exec_task_func_respond> *msg)\n{\n char ret[256];\n sprintf(ret, 
\"rc_exec_task_func_respond(%s->%s): task=%s, seq=%d, fret=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.task_name.c_str(), msg->body.seq_no, msg->body.task_ret.c_str(), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_uc_set_ets_request( const nad_lcm::uc_set_ets_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_set_ets_request: id=%s, type=%s, val=%d, reason=%s\",\n msg->ets_id.c_str(), tet_str(msg->ets_type), msg->ets_value, ets_reason_str(msg->reason));\n return string(ret);\n}\n\nstring log_cr_set_ets_request( const nad_lcm::ne_msg_t<nad_lcm::cr_set_ets_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_set_ets_request(%s->%s): id=%s, type=%s, val=%d, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), msg->body.ets_value, ets_reason_str(msg->body.reason));\n return string(ret);\n}\n\nstring log_rc_set_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_set_ets_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_set_ets_respond(%s->%s): id=%s, type=%s, val=%d, reason=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), msg->body.ets_value, ets_reason_str(msg->body.reason), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_cu_set_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_set_ets_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_set_ets_respond(%s->%s): id=%s, type=%s, val=%d, reason=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), msg->body.ets_value, ets_reason_str(msg->body.reason), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring 
log_uc_add_ets_request( const nad_lcm::uc_add_ets_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_add_ets_request: id=%s, type=%s, val=%d, reason=%s\",\n msg->ets_id.c_str(), tet_str(msg->ets_type), msg->ets_value, ets_reason_str(msg->reason));\n return string(ret);\n}\n\nstring log_cr_add_ets_request( const nad_lcm::ne_msg_t<nad_lcm::cr_add_ets_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_add_ets_request(%s->%s): id=%s, type=%s, val=%d, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), msg->body.ets_value, ets_reason_str(msg->body.reason));\n return string(ret);\n}\n\nstring log_rc_add_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_add_ets_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_add_ets_respond(%s->%s): id=%s, type=%s, val=%d, reason=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), msg->body.ets_value, ets_reason_str(msg->body.reason), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_cu_add_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_add_ets_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_add_ets_respond(%s->%s): id=%s, type=%s, val=%d, reason=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), msg->body.ets_value, ets_reason_str(msg->body.reason), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_uc_delete_ets_request( const nad_lcm::uc_delete_ets_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_delete_ets_request: id=%s, type=%s, reason=%s\",\n msg->ets_id.c_str(), tet_str(msg->ets_type), ets_reason_str(msg->reason));\n return string(ret);\n}\n\nstring log_cr_delete_ets_request( const 
nad_lcm::ne_msg_t<nad_lcm::cr_delete_ets_request > *msg)\n{\n char ret[256];\n sprintf(ret, \"cr_delete_ets_request(%s->%s): id=%s, type=%s, reason=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), ets_reason_str(msg->body.reason));\n return string(ret);\n}\n\nstring log_rc_delete_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::rc_delete_ets_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_delete_ets_respond(%s->%s): id=%s, type=%s, reason=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), ets_reason_str(msg->body.reason), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_cu_delete_ets_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_delete_ets_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_delete_ets_respond(%s->%s): id=%s, type=%s, reason=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.ets_id.c_str(), tet_str(msg->body.ets_type), ets_reason_str(msg->body.reason), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_rc_log_report( const nad_lcm::ne_msg_t<nad_lcm::rc_log_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"rc_log_report(%s->%s): level=%s, log=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n log_str(msg->body.log_level), msg->body.log.c_str());\n return string(ret);\n}\n\nstring log_cu_log_report( const nad_lcm::ne_msg_t<nad_lcm::cu_log_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_log_report(%s->%s): level=%s, log=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n log_str(msg->body.log_level), msg->body.log.c_str());\n return string(ret);\n}\n\nstring log_rc_alarm_report( const nad_lcm::ne_msg_t<nad_lcm::rc_alarm_report > *msg)\n{\n char 
ret[256];\n sprintf(ret, \"rc_alarm_report(%s->%s): level=%s, alarm=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n alarm_str(msg->body.alarm_level), msg->body.alarm.c_str());\n return string(ret);\n}\n\nstring log_cu_alarm_report( const nad_lcm::ne_msg_t<nad_lcm::cu_alarm_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_alarm_report(%s->%s): level=%s, alarm=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n alarm_str(msg->body.alarm_level), msg->body.alarm.c_str());\n return string(ret);\n}\n\nstring log_ro_log_report( const nad_lcm::ne_msg_t<nad_lcm::ro_log_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_log_report(%s->%s): level=%s, log=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n log_str(msg->body.log_level), msg->body.log.c_str());\n return string(ret);\n}\n\nstring log_ou_log_report( const nad_lcm::ou_log_report *msg)\n{\n char ret[256];\n sprintf(ret, \"ou_log_report: level=%s, log=%s\",\n log_str(msg->log_level), msg->log.c_str());\n return string(ret);\n}\n\nstring log_ro_alarm_report( const nad_lcm::ne_msg_t<nad_lcm::ro_alarm_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_alarm_report(%s->%s): level=%s, alarm=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n alarm_str(msg->body.alarm_level), msg->body.alarm.c_str());\n return string(ret);\n}\n\nstring log_ou_alarm_report( const nad_lcm::ou_alarm_report *msg)\n{\n char ret[256];\n sprintf(ret, \"ro_alarm_report: level=%s, alarm=%s\",\n alarm_str(msg->alarm_level), msg->alarm.c_str());\n return string(ret);\n}\n\nstring log_sensor_obstacle_report( const nad_lcm::sensor_obstacle_report *msg)\n{\n char ret[256];\n sprintf(ret, \"sensor_obstacle_report: obstacle=%d\",\n msg->num_of_obstacle);\n return string(ret);\n}\n\nstring log_uc_call_car_request( const nad_lcm::uc_call_car_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_call_car_request: obu=%s, kp=%d, 
dest=%s\",\n msg->obu_name.c_str(), msg->num_of_kp, msg->destination.c_str());\n return string(ret);\n}\n\nstring log_cu_call_car_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_call_car_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_call_car_respond(%s->%s): obu=%s, ret=%s, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), ret_str(msg->body.retcode), msg->body.description.c_str());\n return string(ret);\n}\n\nstring log_om_center_line_report( const nad_lcm::om_center_line_report *msg)\n{\n char ret[256];\n sprintf(ret, \"om_center_line_report: replan=%d, lanes=%d\",\n msg->replan_flag, msg->num_of_lanes);\n return string(ret);\n}\n\nstring log_uo_upcall_request( const nad_lcm::uo_upcall_request *msg)\n{\n char ret[512];\n sprintf(ret, \"uo_upcall_request: obu=%s, upcall=%s, platoon=%d, speed=%.1fkm/h, dest=%s, type=%d, reason=%d, kp=%d\",\n msg->obu_name.c_str(), upcall_str(msg->upcall_type), msg->num_of_platoon, msg->speed, msg->destination.c_str(), msg->type, msg->reason, msg->num_of_kp);\n return string(ret);\n}\n\nstring log_or_upcall_request( const nad_lcm::ne_msg_t<nad_lcm::or_upcall_request > *msg)\n{\n char ret[512];\n sprintf(ret, \"or_upcall_request(%s->%s): obu=%s, upcall=%s, platoon=%d, speed=%.1fkm/h, dest=%s, type=%d, reason=%d, kp=%d\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), upcall_str(msg->body.upcall_type), msg->body.num_of_platoon, msg->body.speed, msg->body.destination.c_str(), msg->body.type, msg->body.reason, msg->body.num_of_kp);\n return string(ret);\n}\n\nstring log_rc_upcall_request( const nad_lcm::ne_msg_t<nad_lcm::rc_upcall_request > *msg)\n{\n char ret[512];\n sprintf(ret, \"rc_upcall_request(%s->%s): obu=%s, upcall=%s, platoon=%d, speed=%.1fkm/h, dest=%s, type=%d, reason=%d, kp=%d\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), 
upcall_str(msg->body.upcall_type), msg->body.num_of_platoon, msg->body.speed, msg->body.destination.c_str(), msg->body.type, msg->body.reason, msg->body.num_of_kp);\n return string(ret);\n}\n\n//oct 新增\nstring log_uc_call_park_car_request( const nad_lcm::uc_call_park_car_request *msg)\n{\n char ret[256];\n sprintf(ret, \"uc_call_park_car_request(): obu=%s, des=%s\",\n msg->obu_name.c_str(), msg->destination.c_str());\n return string(ret);\n}\nstring log_cu_call_park_info_report( const nad_lcm::ne_msg_t<nad_lcm::cu_call_park_info_report > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_call_park_info_report(%s->%s): obu=%s, call_park_state=%d\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu.obu_name.c_str(), msg->body.car_status);\n return string(ret);\n}\nstring log_cu_call_park_car_respond( const nad_lcm::ne_msg_t<nad_lcm::cu_call_park_car_respond > *msg)\n{\n char ret[256];\n sprintf(ret, \"cu_call_park_car_respond(%s->%s): obu=%s, ret=%d, desc=%s\",\n msg->header.local_ne_name.c_str(), msg->header.peer_ne_name.c_str(),\n msg->body.obu_name.c_str(), msg->body.retcode, msg->body.description.c_str());\n return string(ret);\n}\n"
},
{
"alpha_fraction": 0.6962096095085144,
"alphanum_fraction": 0.6999256610870361,
"avg_line_length": 44.226890563964844,
"blob_id": "f1a3a0b33c6ebbcdfdf5e7953a672b605edba1c3",
"content_id": "5b0825afb97581f2c21128a363a31351d21207a7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5412,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 119,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/BallotBox.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#pragma once\n#include \"../utils/type.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerLines.h\"\n\nclass BallotBox{\nprivate:\n\tint _iHeight;\t //投票框高\n\tint _iVotingNumber;\t //投票数\n\tint *_piVotingTableOffset; //[CS4_NEARIMAGEHEIGHT][CS4_NUNBER_OF_NEAR_BALLOT];\n\tint *_piVotingTableYaw; //[CS4_NEARIMAGEHEIGHT][CS4_NUNBER_OF_NEAR_BALLOT];\n\tint *_piVotTablCnt; //[CS4_NEARIMAGEHEIGHT];\n\tint *_piVotingTableWeight; //[CS4_NEARIMAGEHEIGHT][CS4_NUNBER_OF_NEAR_BALLOT];\n\n\tint _iNumberOfOffset;\t // offset\n\tint _iNumberOfYaw;\t\t // yaw\n\tunsigned int *_piBallotBox;\t //\n\tint _iVotingPointNumber;\t //\n\tint _iWeightMax;\t\t\t //\n\tint _iMaxYaw;\t\t\t\t // 最大倾角\n\tint _iOffsetMin;\t //\n\tint _iOffsetMax;\t //\n\tint _iYawMin;\t\t //\n\tint _iYawMax;\t\t //\n\tint _iVoteingThreshold;\t // 投票阈值\n\tint _SearchOffsetMin;\t //\n\tint _SearchOffsetMax;\t //\n\tint _SearchYawMin;\t\t //\n\tint _SearchYawMax;\t\t //\n\npublic:\n\tinline BallotBox(void)\t{\n\t\t_iHeight = CS4_NEARIMAGEHEIGHT;\n\t\t_iVotingNumber = CS4_NUNBER_OF_NEAR_BALLOT;\n\t\t_piVotingTableOffset = NULL;\n\t\t_piVotingTableYaw = NULL;\n\t\t_piVotTablCnt = NULL;\n\t\t_piVotingTableWeight = NULL;\n\n\t\t_iNumberOfOffset = CS4_NUMBER_OF_NEAR_OFFSET;\n\t\t_iNumberOfYaw = CS4_NUMBER_OF_NEAR_YAW;\n\t\t_piBallotBox = NULL;\n\t\t_iVotingPointNumber = 0;\n\t\t_iWeightMax = CS4_WEIGHT_MAX;\n\t\t_iMaxYaw = CS4_NEAR_MAX_YAW;\n\t\t_iOffsetMin = 0;\n\t\t_iOffsetMax = CS4_NUMBER_OF_NEAR_OFFSET - 1;\n\t\t_iYawMin = -CS4_NEAR_MAX_YAW;\n\t\t_iYawMax = CS4_NEAR_MAX_YAW;\n\t\t_iVoteingThreshold = CS4_MINIMUM_VOTES;\n\t\t_SearchOffsetMin\t=\t_iOffsetMin;\n\t\t_SearchOffsetMax\t=\t_iOffsetMax;\n\t\t_SearchYawMin\t\t=\t_iYawMin;\n\t\t_SearchYawMax\t\t=\t_iYawMax;\n\t}\n\tinline 
~BallotBox(void)\t{\n\t\tSAFE_DELETE_ARRAY(_piBallotBox);\n\t\tSAFE_DELETE_ARRAY(_piVotingTableOffset);\n\t\tSAFE_DELETE_ARRAY(_piVotingTableYaw);\n\t\tSAFE_DELETE_ARRAY(_piVotTablCnt);\n\t\tSAFE_DELETE_ARRAY(_piVotingTableWeight);\n\t}\n//\tBallotBox(int iHeight, int iVotingNumber);\n\tinline unsigned int **data(void)\t{\treturn &_piBallotBox;\t}\n\n\tvoid makeVotingTable(int iHeight, int iWidth, int iImageHeight, int iImageWidth);\n\tvoid clear(void);\n\tvoid initialize();\n\tinline int Height(void)\t{\treturn _iHeight;\t}\n\tinline int VotingNumber(void)\t{\treturn _iVotingNumber;\t}\n\tinline int VotingPointNumber(void)\t{\treturn _iVotingPointNumber;\t}\n\tinline void VotingPointNumber(int iV)\t{\t_iVotingPointNumber = iV;\t}\n\tinline void incVotingPointNumber(void)\t{\t_iVotingPointNumber++;\t}\n\tinline int VotingTableCount(int iIdst)\t{\treturn _piVotTablCnt[iIdst];\t}\n\tinline int VotingTableOffset(int iIdst, int iIdx)\t{\treturn _piVotingTableOffset[VotingNumber() * iIdst + iIdx];\t}\n\tinline int VotingTableYaw(int iIdst, int iIdx)\t{\treturn _piVotingTableYaw[VotingNumber() * iIdst + iIdx];\t}\n\tinline int VotingTableWeight(int iIdst, int iIdx)\t{\treturn _piVotingTableWeight[VotingNumber() * iIdst + iIdx];\t}\n\tinline int NumberOfOffset(void)\t{\treturn _iNumberOfOffset;\t}\n\tinline int NumberOfYaw(void)\t\t{\treturn _iNumberOfYaw;\t\t}\n\tinline int Box(int iOffset, int iYaw)\t{\treturn _piBallotBox[NumberOfYaw() * iOffset + iYaw];\t}\n\tinline int WeightMax(void)\t\t\t{\treturn _iWeightMax;\t\t\t}\n\tinline int MaxYaw(void)\t\t\t{\treturn _iMaxYaw;\t\t\t}\n\tinline int OffsetMin(void)\t{\treturn _iOffsetMin;\t}\n\tinline int OffsetMax(void)\t{\treturn _iOffsetMax;\t}\n\tinline int YawMin(void)\t\t{\treturn _iYawMin;\t\t}\n\tinline int YawMax(void)\t\t{\treturn _iYawMax;\t\t}\n\tinline int SearchOffsetMin(void)\t{\treturn\t_SearchOffsetMin;\t}\n\tinline int SearchOffsetMax(void)\t{\treturn\t_SearchOffsetMax;\t}\n\tinline int 
SearchYawMin(void)\t{\treturn\t_SearchYawMin;\t}\n\tinline int SearchYawMax(void)\t{\treturn\t_SearchYawMax;\t}\n\tinline void SearchOffsetMin(int iV)\t{\t_SearchOffsetMin\t=\tiV;\t}\n\tinline void SearchOffsetMax(int iV)\t{\t_SearchOffsetMax\t=\tiV;\t}\n\tinline void SearchYawMin(int iV)\t{\t_SearchYawMin\t=\tiV;\t}\n\tinline void SearchYawMax(int iV)\t{\t_SearchYawMax\t=\tiV;\t}\n\n\tinline void Height(int iV)\t{\t_iHeight = iV;\t}\n\tinline void VotingNumber(int iV)\t{\t_iVotingNumber = iV;\t}\n\tinline void VotingTableCount(int iIdst, int iV)\t{\t_piVotTablCnt[iIdst] = iV;\t}\n\tinline void VotingTableOffset(int iIdst, int iIdx, int iV)\t{\t_piVotingTableOffset[VotingNumber() * iIdst + iIdx] = iV;\t}\n\tinline void VotingTableYaw(int iIdst, int iIdx, int iV)\t{\t_piVotingTableYaw[VotingNumber() * iIdst + iIdx] = iV;\t}\n\tinline void VotingTableWeight(int iIdst, int iIdx, int iV)\t{\t_piVotingTableWeight[VotingNumber() * iIdst + iIdx] = iV;\t}\n\tinline void NumberOfOffset(int iV)\t{\t_iNumberOfOffset = iV;\t}\n\tinline void NumberOfYaw(int iV)\t\t{\t_iNumberOfYaw = iV;\t\t}\n\tinline void Box(int iOffset, int iYaw, int iV)\t{\t_piBallotBox[NumberOfYaw() * iOffset + iYaw] = iV;\t}\n\tinline void WeightMax(int iV)\t\t\t{\t_iWeightMax = iV;\t\t\t}\n\tinline void MaxYaw(int iV)\t\t\t{\t_iMaxYaw = iV;\t\t\t}\n\tinline void OffsetMin(int iV)\t{\t_iOffsetMin = iV;\t}\n\tinline void OffsetMax(int iV)\t{\t_iOffsetMax = iV;\t}\n\tinline void YawMin(int iV)\t\t{\t_iYawMin = iV;\t\t}\n\tinline void YawMax(int iV)\t\t{\t_iYawMax = iV;\t\t}\n\tinline int VotingThreshold(void)\t{\treturn _iVoteingThreshold;\t}\n\tinline void VotingThreshold(int iV)\t{\t_iVoteingThreshold = iV;\t}\n\n\tLaneMarkerLines *searchPeak(int iMinOffset, int iMaxOffset, int iMinYaw, int iMaxYaw, int iVotesThreshold);\n\tinline LaneMarkerLines *searchPeak(void)\t{\n\t\treturn searchPeak(SearchOffsetMin(), SearchOffsetMax(), SearchYawMin(), SearchYawMax(), VotingThreshold());\n\t}\n\n};\n"
},
{
"alpha_fraction": 0.6457633376121521,
"alphanum_fraction": 0.6705860495567322,
"avg_line_length": 45.99122619628906,
"blob_id": "4de063b74d5c1742c3521db475b029627719ace7",
"content_id": "2fed13b06bf95ebc9c1471b24688e37fb3e7083a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 5358,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 114,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/my_resource.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "//\n//\tfile:\tmy_resource.h\n// author: krutch_zhou\n// email:[email protected]\n//\t$modified: 2015/06/27\n//\t$Log: my_resource.h,v $\n//\n#ifndef\t_MY_RESOURCE_H_\n#define\t_MY_RESOURCE_H_\n#define ICON_FM 105\n\n#define\tRESOURCE_ID_BASE\tWM_USER\n\n\n#define\tMENU_QUIT\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 0)\n#define\tMENU_PAUSE\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 1)\n\n\n///////////////////////////////////////\n#define\tCW_IDC_BUTTON_REWIND\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 2)\n#define\tCW_IDC_EDIT_FRAMENUM\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 3)\n#define\tCW_IDC_BUTTON_FRAMESET\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 4)\n\n#define\tCW_IDC_BUTTON_PREV2\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 5)\n#define\tCW_IDC_BUTTON_PREV\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 6)\n#define\tCW_IDC_PAUSE\t\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 7)\n#define\tCW_IDC_BUTTON_NEXT\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 8)\n#define\tCW_IDC_BUTTON_NEXT2\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 9)\n\n#define\tCW_IDC_BUTTON_FREEZE\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 10)\n#define\tCW_IDC_BUTTON_RESET\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 11)\n#define\tCW_IDC_BUTTON_RESET_WINDOW\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 12)\n#define\tCW_IDC_QUIT\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 73)\n\n#define\tCW_IDC_EDIT_STOPATNUM\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 13)\n#define\tCW_IDC_BUTTON_STOPAT\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 14)\n#define\tCW_IDC_BUTTON_PAUSEONRESET\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 15)\n\n///////////////////////////////////////\n#define\tCW_IDC_BUTTON_SaveIniFile\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 16)\n#define\tCW_IDC_BUTTON_LoadIniFile\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 17)\n///////////////////////////////////////\n#define\tCW_IDC_BUTTON_saveAsAVI\t\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 
18)\n#define\tCW_IDC_BUTTON_toggleBatchModeFlagOfOutputAVI\t\t\t\t\t\t(RESOURCE_ID_BASE + 19)\n#define\tCW_IDC_BUTTON_saveDetectionLog\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 20)\n#define\tCW_IDC_BUTTON_saveLaneMarkerInfo\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 21)\n#define\tCW_IDC_BUTTON_saveLaneParameters\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 22)\n#define\tCW_IDC_BUTTON_saveEvaluationLog\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 23)\n\n\n\n#define\tCW_IDC_BUTTON_DrawNearMidFarLineOnInputImageFunc\t\t\t\t\t(RESOURCE_ID_BASE + 24)\n#define\tCW_IDC_BUTTON_DrawFrameNumberOnInputImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 25)\n#define\tCW_IDC_BUTTON_DrawDetectionStatus\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 26)\n#define\tCW_IDC_BUTTON_DrawLaneParametersOnInputImageFunc\t\t\t\t\t(RESOURCE_ID_BASE + 27)\n\n#define\tCW_IDC_BUTTON_DrawDetectionTimeOnInputImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 28)\n#define\tCW_IDC_BUTTON_DrawFileNameOnInputImageFunc\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 29)\n#define\tCW_IDC_BUTTON_DrawDetectionModeOnInputImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 30)\n\n///////////////////////////////////////\n\n#define\tCW_IDC_BUTTON_DrawInputImageFunc\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 31)\n#define\tCW_IDC_BUTTON_DrawDisparityOnInputImageFunc\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 32)\n\n#define\tCW_IDC_BUTTON_DrawLaneMarkerPointOnInputImageFunc\t\t\t\t\t(RESOURCE_ID_BASE + 33)\n#define\tCW_IDC_BUTTON_DrawBoundaryPointOnInputImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 34)\n#define\tCW_IDC_BUTTON_DrawObserverPointOnInputImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 35)\n\n#define\tCW_IDC_BUTTON_DrawResultOnInputImageFunc\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 36)\n#define\tCW_IDC_BUTTON_DrawResultByNearAreaParameterOnInputImageFunc\t\t\t(RESOURCE_ID_BASE + 77)\n#define\tCW_IDC_BUTTON_DrawCurbOnInputImageFunc\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 37)\n#define\tCW_IDC_BUTTON_DrawAnsTagOnInputImageFunc\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 
38)\n\n#define\tCW_IDC_BUTTON_DrawLaneMarkerLineOnInputImageFunc\t\t\t\t\t(RESOURCE_ID_BASE + 39)\n#define\tCW_IDC_BUTTON_DrawLaneMarkerLineSequencesOnInputImageFunc\t\t\t(RESOURCE_ID_BASE + 40)\n#define\tCW_IDC_BUTTON_DrawLaneMarkerOnInputImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 41)\n#define\tCW_IDC_BUTTON_DrawLaneBoundaryLineOnInputImageFunc\t\t\t\t\t(RESOURCE_ID_BASE + 42)\n\n///////////////////////////////////////\n\n#define\tCW_IDC_BUTTON_DrawRoadImageFunc\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 43)\n#define\tCW_IDC_BUTTON_DrawGridOnRoadImageFunc\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 44)\n#define\tCW_IDC_BUTTON_DisplayRoadWindow\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 45)\n#define\tCW_IDC_BUTTON_DrawRoadImageWithEstimatedPitch\t\t\t\t\t\t(RESOURCE_ID_BASE + 46)\n\n#define\tCW_IDC_BUTTON_DrawLaneMarkerPointOnRoadImageFunc\t\t\t\t\t(RESOURCE_ID_BASE + 47)\n#define\tCW_IDC_BUTTON_DrawBoundaryPointOnRoadImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 48)\n#define\tCW_IDC_BUTTON_DrawObserverPointOnRoadImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 49)\n\n#define\tCW_IDC_BUTTON_DrawResultOnRoadImageFunc\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 50)\n\n#define\tCW_IDC_BUTTON_DrawLaneMarkerLineOnRoadImageFunc\t\t\t\t\t\t(RESOURCE_ID_BASE + 51)\n#define\tCW_IDC_BUTTON_DrawLaneMarkerLineSequencesOnRoadImageFunc\t\t\t(RESOURCE_ID_BASE + 52)\n#define\tCW_IDC_BUTTON_DrawLaneMarkerOnRoadImageFunc\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 53)\n#define\tCW_IDC_BUTTON_DrawLaneBoundaryLineOnRoadImageFunc\t\t\t\t\t(RESOURCE_ID_BASE + 54)\n\n\n#define\tCW_IDC_BUTTON_SearchWithParameterInNearArea\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 69)\n#define\tCW_IDC_BUTTON_UseFarArea\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 70)\n\n#define\tCW_IDC_BUTTON_UseComplexMode\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 71)\n#define\tCW_IDC_BUTTON_UseSelectLaneBoundaryByComplexMode\t\t\t\t\t(RESOURCE_ID_BASE + 72)\n\n#define\tCW_IDC_BUTTON_DrawSelectedEdgePointOn3DImageFunc\t\t\t\t\t(RESOURCE_ID_BASE + 
74)\n#define\tCW_IDC_BUTTON_DrawGridMap1DFunc\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 75)\n\n#define\tCW_IDC_BUTTON_UseBB_DIFF\t\t\t\t\t\t\t\t\t\t\t(RESOURCE_ID_BASE + 76)\n\n///////////////////////////////////////\n\n\n\n#endif\t_MY_RESOURCE_H_\n\n"
},
{
"alpha_fraction": 0.5523178577423096,
"alphanum_fraction": 0.5761589407920837,
"avg_line_length": 17.875,
"blob_id": "b521a49948ffd4805498481ce79b6f0ee38b2788",
"content_id": "13bcc83ff8cfb5f5cc323d3a932c2b569d01a9ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 951,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 40,
"path": "/athena/core/x86/Camera/vision_ssd_detect/include/ssd_detection/camera_obj_list.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include <iostream>\n#include <opencv2/core/core.hpp>\n#include <opencv2/imgproc/imgproc.hpp>\n#include <opencv2/highgui/highgui.hpp>\n\nusing namespace std;\n\n///图像目标(单个)属性\nclass camera_obj\n{\npublic:\n ///目标id,与连续跟踪有关\n int id;\n /// 目标稳定性 0;未知 1;稳定 2: 不稳定目标\n int stability;\n ///行人/车辆/ car :7 person: 15 bus: 6\n int classification;\n /// 目标检测跟踪状态 1;未跟踪 2: acc障碍物\n int detection_status;\n ///横向位置\n double lat_pos;\n ///横向速度\n double lat_rate;\n ///纵向位置\n double lon_pos;\n ///纵向速度\n double lon_rate;\n ///宽度,单位:m\n double width;\n ///检测目标的置信度\n float score;\n /// 目标矩形框像素\n cv::Rect box_point;\n /// 1 真实检测值 0 预测值\n\tint32_t real_data;\n ///\n int32_t lifespan;\n\n};\n"
},
{
"alpha_fraction": 0.5987553596496582,
"alphanum_fraction": 0.6165357828140259,
"avg_line_length": 30.245370864868164,
"blob_id": "b628e5e84fddd953c7a35554bb1b4caeed85a7ac",
"content_id": "2b10bd1e5c770432dcf514509078edc4f5a153cc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6749,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 216,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneParameter.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include \"../utils/type.h\"\n#include \"../utils/config.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerLineSequence.h\"\n#include \"LaneParameterEstimator.h\"\n\n\nclass LaneParameter\n{\nprivate:\n static const int _iDim = CS4_STATUS_NUMBER;\n BOOL _bAvailable;\n int _iCounterAfterInitialization;\n double _dParameterInit[_iDim];\n double _dParameterMin[_iDim];\n double _dParameterMax[_iDim];\n//\tdouble _dParameter[_iDim];\n\n BOOL _bNotAddSystemNoise;\n\n int _s4_mobs;\n\n double _adb_X_t_t[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* 1];\n double _adb_X_t_tp[\tCS4_STATUS_NUMBER\t\t\t\t\t* 1];\n double _adb_X_tn_t[\tCS4_STATUS_NUMBER\t\t\t\t\t* 1];\n double _adb_P_t_t[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_STATUS_NUMBER];\n double _adb_P_t_tp[\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_STATUS_NUMBER];\n double _adb_P_tn_t[\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_STATUS_NUMBER];\n double _adb_R_t[\t\tCS4_OBS_NUMBER\t\t\t\t\t\t* CS4_OBS_NUMBER];\n double _adb_F_t[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_STATUS_NUMBER];\n double _adb_F_t_T[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_STATUS_NUMBER];\n double _adb_D_t[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_CONTROL_INPUT_NUMBER];\n double _adb_G_t[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_STOCHASTIC_VARIAVLE_NUMBER];\n double _adb_G_t_T[\t\tCS4_STOCHASTIC_VARIAVLE_NUMBER\t\t* CS4_STATUS_NUMBER];\n double _adb_K_t[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_OBS_NUMBER];\n double _adb_U_t[\t\tCS4_CONTROL_INPUT_NUMBER\t\t\t* 1];\n double _adb_H_t[\t\tCS4_OBS_NUMBER\t\t\t\t\t\t* CS4_STATUS_NUMBER];\n double _adb_H_t_T[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_OBS_NUMBER];\n double _adb_Q_t[\t\tCS4_STATUS_NUMBER\t\t\t\t\t* CS4_STOCHASTIC_VARIAVLE_NUMBER];\n double _adb_h_t[\t\tCS4_OBS_NUMBER\t\t\t\t\t\t* 1];\n double _adb_y_t[\t\tCS4_OBS_NUMBER\t\t\t\t\t\t* 1];\n int _adb_side_t[\tCS4_OBS_NUMBER\t\t\t\t\t\t* 1];\n double _adb_i_t[\t\tCS4_OBS_NUMBER\t\t\t\t\t\t* 1];\n double _adb_P_0[CS4_STATUS_NUMBER\t\t\t\t\t* CS4_STATUS_NUMBER];\n double 
_adb_Q_0[CS4_STOCHASTIC_VARIAVLE_NUMBER\t*\tCS4_STOCHASTIC_VARIAVLE_NUMBER];\n double _adb_DM_a[2 * 3];\n double _adb_DM_b[2 * 2];\n double _db_delta_t;\n\n // psidash, psi, edash, e rhodash, rho, phi, w\n//\tdouble _adb_X_0[CS4_STATUS_NUMBER * 1];\n//\tdouble _adb_X_m[CS4_STATUS_NUMBER * 1];\n\n//\tdouble _adb_W_m[CS4_STOCHASTIC_VARIAVLE_NUMBER * 1];\n//\tdouble _adb_W_m0[CS4_STOCHASTIC_VARIAVLE_NUMBER * 1];\n//\tdouble _adb_V_m[CS4_OBS_NUMBER * 1];\n //(1)\n double _adb_H_tP_t_tp[\t\t\t\tCS4_OBS_NUMBER\t\t* CS4_STATUS_NUMBER];\n double _adb_H_tP_t_tpH_t_T[\t\tCS4_OBS_NUMBER\t\t* CS4_OBS_NUMBER];\n double _adb_H_tP_t_tpH_t_T_R_t[\tCS4_OBS_NUMBER\t\t* CS4_OBS_NUMBER];\n double _adb_H_tP_t_tpH_t_T_R_t_1[\tCS4_OBS_NUMBER\t\t* CS4_OBS_NUMBER];\n double _adb_P_t_tpHt_T[\t\t\tCS4_STATUS_NUMBER\t* CS4_OBS_NUMBER];\n //(2)\n double _adb_K_tHt[\t\t\t\t\tCS4_STATUS_NUMBER\t* CS4_STATUS_NUMBER];\n double _adb_K_tHtP_t_tp[\t\t\tCS4_STATUS_NUMBER\t* CS4_STATUS_NUMBER];\n //(3)\n double _adb_y_t_h_t[\t\t\t\tCS4_OBS_NUMBER\t\t* 1];\n double _adb_K_ty_t_h_t[\t\t\tCS4_STATUS_NUMBER\t* 1];\n //(4)\n double _adb_G_tQ_t[\t\t\t\t\tCS4_STATUS_NUMBER\t* CS4_STOCHASTIC_VARIAVLE_NUMBER];\n double _adb_G_tQ_tG_t_T[\t\t\t\tCS4_STATUS_NUMBER\t* CS4_STATUS_NUMBER];\n double _adb_F_tP_t_t[\t\t\t\tCS4_STATUS_NUMBER\t* CS4_STATUS_NUMBER];\n double _adb_F_tP_t_tF_t_T[\t\t\tCS4_STATUS_NUMBER\t* CS4_STATUS_NUMBER];\n //(5)\n double _adb_F_tX_t_t[\t\t\t\tCS4_STATUS_NUMBER\t* 1];\n double _adb_D_tU_t[\t\t\t\t\tCS4_STATUS_NUMBER\t* 1];\n\npublic:\n inline LaneParameter()\n {\n _bAvailable = FALSE;\n _iCounterAfterInitialization = 0;\n\n setupParameterInit(NULL);\n setupParameterMinMax(NULL);\n _db_delta_t = 0.1;\n initialize();\n }\n inline LaneParameter(PARAM_CAM *p)\n {\n _bAvailable = FALSE;\n _iCounterAfterInitialization = 0;\n\n setupParameterInit(p);\n setupParameterMinMax(p);\n\n _db_delta_t = 0.1;\n initialize();\n }\n inline ~LaneParameter()\t{\t}\n\n void setupParameterInit(PARAM_CAM 
*p);\n\n void initialize(void);\n//\tinline void initialize(void) {\n//\t\tfor(int iIdx = 0; iIdx < Dim(); iIdx++) {\n//\t\t\t_adb_X_t_t[iIdx] = _dParameterInit[iIdx];\n//\t\t}\n//\t}\n void setupParameterMinMax(PARAM_CAM *p);\n inline int Dim(void)\n {\n return _iDim;\n }\n inline BOOL Available(void)\n {\n return _bAvailable;\n }\n inline void Available(BOOL bV)\n {\n _bAvailable = bV;\n }\n inline int getCounterAfterInitialization(void)\n {\n return _iCounterAfterInitialization;\n }\n inline void clearCounterAfterInitialization(void)\n {\n _iCounterAfterInitialization = 0;\n }\n inline void incCounterAfterInitialization(void)\n {\n _iCounterAfterInitialization++;\n }\n inline double Param(int iIdx)\n {\n return _adb_X_t_t[iIdx];\n }\n inline double ParamInit(int iIdx)\n {\n return _dParameterInit[iIdx];\n }\n inline void ParamInit(int iIdx, double dV)\n {\n _dParameterInit[iIdx] = dV;\n }\n inline double ParamMin(int iIdx)\n {\n return _dParameterMin[iIdx];\n }\n inline void ParamMin(int iIdx, double dV)\n {\n _dParameterMin[iIdx] = dV;\n }\n inline double ParamMax(int iIdx)\n {\n return _dParameterMax[iIdx];\n }\n inline void ParamMax(int iIdx, double dV)\n {\n _dParameterMax[iIdx] = dV;\n }\n double LaneBoundaryPositionOnRoad(int iK, double dZ);\n double LaneBoundaryPositionOnRoad(int iK, double dZ, double *pdLaneParameter);\n double LaneBoundaryPositionOnImagePixel(PARAM_CAM *p, int iK, int iSrc);\n inline BOOL getNotAddSystemNoise(void)\n {\n return _bNotAddSystemNoise;\n }\n inline void setNotAddSystemNoise(void)\n {\n _bNotAddSystemNoise = TRUE;\n }\n inline void clearNotAddSystemNoise(void)\n {\n _bNotAddSystemNoise = FALSE;\n }\n\n\n//\tLaneParameterEstimator();\n//\t~LaneParameterEstimator();\n\n double *getX_t_t(void)\n {\n return _adb_X_t_t;\n }\n void calc_DM_a_b(void);\n void get_U_t(void);\n void update_t(void);\n//\tvoid set_y_t(void);\n//\tvoid set_y_t(int s4_side, int s4_iIdx);\n void set_y_t(int s4_side, LaneMarkerPoint *pLMP);\n BOOL 
calc_Ht(PARAM_CAM *pParamCam);\n void calc_Ft(void);\n void calc_D_t(void);\n void calc_G_t(void);\n void calc_Kt(void);\n void calc_Kt2(void);\n void calc_P_t_t(void);\n void calc_h_t(PARAM_CAM *pParamCam);\n double get_HX_t_DM(PARAM_CAM *pParamCam, S4 s4_a_isrc, S4 s4_a_side);\n void calc_X_t_t(void);\n void calc_P_tn_t(void);\n void calc_X_tn_t(void);\n void calc_P_0(void);\n void calc_Q_t(void);\n void calc_Q_0(void);\n void calc_R_t(void);\n\n\n int update(PARAM_CAM *pParamCam, LaneMarkerPoints *pLeftBoundaryPoints, LaneMarkerPoints *pRightBoundaryPoints);\n unsigned long isInvalidWithMinAndMax(void);\n\n};\n"
},
{
"alpha_fraction": 0.5710116624832153,
"alphanum_fraction": 0.5849546194076538,
"avg_line_length": 27.03636360168457,
"blob_id": "959a35a95c049c5335e6c7aafba5ddd9fc045d42",
"content_id": "f0c92af3fd3c69a0b0a1bc0b1c07a5918e9ea1d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3332,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 110,
"path": "/athena/examples/LCM/Singlecar/obu/src/fam/oam/alarm/nad_ui_alarm.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_ui_alarm.cpp\n * 创建者:张毅00151602\n * 时 间:2016-03-02\n * 描 述:向OCT和VUI发告警\n-------------------------------------------------------*/\n\n//头文件\n#include \"nad_ui_alarm.h\"\n\n\n#if defined(_NAD_CSU_) || defined(_NAD_RSU_) || defined(_NAD_OBU_)\n\n//全局告警数量抑制\nstatic map<string, int64_t> g_alarm_list;\n\n//判断是不是重复告警\nstatic bool alarm_too_busy(string mask, int64_t ttl)\n{\n //ttl==0表示不过滤告警\n if (ttl == 0)\n {\n return false;\n }\n\n //查找上次的告警\n int64_t now = get_current_time();\n map<string, int64_t>::iterator it;\n it = g_alarm_list.find(mask);\n if (it != g_alarm_list.end() && now < (it->second + ttl))\n {\n return true;\n }\n\n //保存当前告警\n g_alarm_list[mask] = now;\n return false;\n}\n\n//基本参数的赋值\n#define ALARM_SET_PARA(msg) \\\n msg.alarm_proc = PROC_TYPE; \\\n msg.alarm_time = current_datetime_str(); \\\n msg.alarm_level = alarm_level; \\\n msg.alarm_type = ALARM_TYPE_OCCUR; \\\n msg.alarm_no = 0; \\\n msg.alarm_show = ALARM_SHOW_TEXT_SOUND; \\\n msg.alarm = alarm; \\\n if (mask == \"\") mask = alarm;\n\n#endif\n\n#ifdef _NAD_CSU_\n\n//向oct发告警,封装了cu_alarm_report\nvoid alarm_report_to_oct(string oct_name, int32_t alarm_level, string alarm, string mask, int64_t ttl)\n{\n ne_msg_t<cu_alarm_report> msg(oct_name, \"cu_alarm_report\");\n ALARM_SET_PARA(msg.body);\n if (alarm_too_busy(mask, ttl)) return;\n //g_lcm->publish_nemsg(msg);\n g_csu_zmq->send_nemsg<cu_alarm_report>(msg);\n LOG_SEND(log_cu_alarm_report(&msg));\n LOG(INFO) << \"alarm_report_to_oct(\" << oct_name << \"): \" << alarm;\n}\n\n#endif\n\n#ifdef _NAD_RSU_\n\n//向oct发告警,封装了rc_alarm_report,返回自动分配的alarm_no\nvoid alarm_report_to_oct(int32_t alarm_level, string alarm, string mask, int64_t ttl)\n{\n\n ne_msg_t<rc_alarm_report> msg(g_config.csu.name, \"rc_alarm_report\");\n ALARM_SET_PARA(msg.body);\n if (alarm_too_busy(mask + \"/oct\", ttl)) return;\n //g_lcm->publish_nemsg(msg);\n g_rsu_zmq->send_nemsg<rc_alarm_report>(msg);\n 
LOG_SEND(log_rc_alarm_report(&msg));\n LOG(INFO) << \"alarm_report_to_oct: \" << alarm;\n}\n\n//向vui发告警,封装了ro_alarm_report,返回自动分配的alarm_no\nvoid alarm_report_to_vui(string obu_name, int32_t alarm_level, string alarm, string mask, int64_t ttl)\n{\n ne_msg_t<ro_alarm_report> msg(obu_name, \"ro_alarm_report\");\n ALARM_SET_PARA(msg.body);\n if (alarm_too_busy(mask + \"/\" + obu_name, ttl)) return;\n //g_lcm->publish_nemsg(msg);\n g_rsu_zmq->send_nemsg<ro_alarm_report>(msg);\n LOG_SEND(log_ro_alarm_report(&msg));\n LOG(ERROR) << \"alarm_report_to_vui(\" << obu_name << \"): \" << alarm;\n}\n#endif\n\n#ifdef _NAD_OBU_\n\n//向vui发告警,封装了ou_alarm_report,返回自动分配的alarm_no\nvoid alarm_report_to_vui(int32_t alarm_level, string alarm, string mask, int64_t ttl)\n{\n ou_alarm_report msg;\n ALARM_SET_PARA(msg);\n if (alarm_too_busy(mask, ttl)) return;\n g_lcm->publish(\"ou_alarm_report\", &msg);\n LOG_SEND(log_ou_alarm_report(&msg));\n LOG(INFO) << \"alarm_report_to_vui: \" << alarm;\n}\n\n#endif\n"
},
{
"alpha_fraction": 0.5389947891235352,
"alphanum_fraction": 0.5623916983604431,
"avg_line_length": 18.89655113220215,
"blob_id": "10bda6591c9a75b8f87430cee9902da47f55fd26",
"content_id": "fd3fb5a39e4f5d6352d41cd6b1425a27f9ec124e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1564,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 58,
"path": "/athena/core/x86/Common/include/distributed_runtime/info/nad_speed.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_speed.h\n * 时 间:2016-10-05\n * 描 述:管理速度\n-------------------------------------------------------*/\n#ifndef _NAD_SPEED_H\n#define _NAD_SPEED_H\n#include \"nad_type.h\"\n\n//速度优先级\n#define SV_LOW 0 //低优先级,强烈建议都用SV_LOW优先级\n#define SV_HIGH 1 //高优先级,如果存在SV_HIGH的速度值,则忽略所有SV_LOW\n\n//速度值类型\n#define SV_ABSOLUTE 0 //绝对速度值\n#define SV_RELATE 1 //相对速度,如-2=速度减2km/h,可叠加\n\n//一个速度值\nclass nad_speed_value\n{\npublic:\n string name; //为这个速度值命名\n int priority; //SV_LOW/V_HIGH,如果存在SV_HIGH的速度值,则忽略所有SV_LOW\n int type; //SV_ABSOLUTE=绝对速度值,SV_RELATE=相对速度,如-2=速度减2km/h,可叠加\n double speed; //速度值,单位: km/h\n int64_t ttl; //默认是0=仅本周起有效,5000表示未来5秒都有效,单位:毫秒\n};\n\n\n//速度管理\nclass nad_speed\n{\npublic:\n double speed; //速度值,单位: km/h\n\n //所有的速度值\n vector<nad_speed_value> speed_list;\n\npublic:\n //构造析构函数\n nad_speed();\n virtual ~nad_speed();\n\n //添加一个速度值,返回综合速度\n double add(string name, int priority, int type, double speed, int64_t ttl = 0);\n\n //查找一个速度\n nad_speed_value *find(string name);\n\n //清空所有的速度\n void clear();\n\n //打印速度值\n void show();\n};\n\n\n#endif\n"
},
{
"alpha_fraction": 0.6786516904830933,
"alphanum_fraction": 0.6966292262077332,
"avg_line_length": 19.227272033691406,
"blob_id": "406ce9486b694ad9476f3c1e23ea048491ebedab",
"content_id": "bd9b1ac7da33694dc43a4f1c98ada6ce8dfbdb3b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 445,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 22,
"path": "/athena/core/arm/Control/include/common/kalman_filter_app.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * Test for the KalmanFilter class with 2D projectile motion.\n *\n * @author: chu\n * @date: 2016.9.11\n */\n#include <fstream>\n#include <cmath>\n\n\nint kalman_filter_initial();\n\ndouble kalman_filter_update(double speed, double acc);\n\nclass kalman_filter_app\n{\n private:\n KalmanFilter *pkalman_filter;\n public:\n kalman_filter_app(double param_dt, double param_Q, double param_R);\n double kalman_filter_update(double param_data);\n};\n"
},
{
"alpha_fraction": 0.7007481455802917,
"alphanum_fraction": 0.7007481455802917,
"avg_line_length": 19.049999237060547,
"blob_id": "efcc2be4fc30b42b94f056054d273cda6b9522e5",
"content_id": "c8d12e55dcc2db56c4bafa2cc6014091df16514d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 401,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 20,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/Lane.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <stdio.h>\n#include <vector>\n#include \"BaseDefine.h\"\n\n#ifndef _LANE_H\n#define _LANE_H\n\n// Detect regions\ntypedef struct _DET_REGION\n{\n CvPoint left_top;\n CvPoint right_down;\n} DET_REGION;\n\nint Lanedetection(cv::Mat& Input, cv::Mat& Roadimage, cv::Mat& Lane_output, double &end_fps_time);\nint Lanedetection(cv::Mat& Input, cv::Mat& Roadimage, cv::Mat& Lane_output);\n\n#endif\n"
},
{
"alpha_fraction": 0.6850588917732239,
"alphanum_fraction": 0.690638542175293,
"avg_line_length": 26.3389835357666,
"blob_id": "eab288bee5347ecd006dabc37141e059b0de1ce3",
"content_id": "85eccbe8e4afcb16a23935f9790fdd7bf4a03922",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1626,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 59,
"path": "/athena/core/arm/Map/include/LaneletMap.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*\n * © 2014 by Philipp Bender <[email protected]>\n *\n * This file is part of libLanelet.\n *\n * libLanelet is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * libLanelet is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with libLanelet. If not, see <http://www.gnu.org/licenses/>.\n */\n\n#pragma once\n\n#include \"Lanelet.hpp\"\n#include \"LLTree.hpp\"\n#include \"LaneletGraph.hpp\"\n\nnamespace LLet\n{\n\nstruct NoPath\n{\n lanelet_ptr_t start;\n lanelet_ptr_t dest;\n};\n\nclass LaneletMap\n{\npublic:\n LaneletMap( std::vector< lanelet_ptr_t > lanelets );\n LaneletMap( std::string filename );\n\n std::vector< lanelet_ptr_t > query( const BoundingBox& box );\n std::vector< lanelet_ptr_t > shortest_path( const lanelet_ptr_t& start, const lanelet_ptr_t& dest ) const;\n\n const lanelet_ptr_t& lanelet_by_id( int64_t id ) const;\n\n const Graph& graph() const;\n Graph& get_graph();///< 获得地图拓扑\n\n const std::vector< lanelet_ptr_t > _lanelets;\n const std::vector< regulatory_element_ptr_t> _regulatory_element;\n\nprivate:\n void init();\n LLTree _lanelet_tree;\n LLet::Graph _graph;\n int64_t vertex_id_by_lanelet( const lanelet_ptr_t& lanelet) const;\n};\n\n}\n"
},
{
"alpha_fraction": 0.6859781742095947,
"alphanum_fraction": 0.6901763081550598,
"avg_line_length": 14.84000015258789,
"blob_id": "82d1aeadc7021dab7fab0fe5fdd6d990f905fb0a",
"content_id": "9af3f051ccee82a2f767efa969dce42fcb5f98f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1241,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 75,
"path": "/athena/core/x86/Camera/vision_ssd_detect/kalman/kalmanfilter.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/***************By yuanjun**************************/\n#pragma once\n#include \"matrix.h\"\n#include \"../include/camera_obj_list.hpp\"\n\nusing namespace std;\n\ntypedef struct _X_state\n{\n\tfloat x_position;\n\tfloat z_positon;\n\tfloat x_speed;\n\tfloat z_speed;\n\n}ObjectState;\n\ntypedef struct _Z_measurement\n{\n\tfloat x_measurement;\n\tfloat z_measurement;\n}Measurement;\n\nclass KalmanFilter\n{\npublic:\n\n CMatrix A;//X(k)=A X(k-1)+W(k) //A系统参数\n\n CMatrix H; //Z(k)=H X(k)+V(k) //测量系统参数\n\n\tCMatrix X1;\n\n\tCMatrix X2;\n\n\tCMatrix Z; //Z(k)是k时刻的测量值\n\n\tCMatrix K;\n\n\tCMatrix Q; //过程噪声\n\n\tCMatrix R; //测量噪声\n\n\tCMatrix P1; //Estimate error\n\n\tCMatrix P2;\n\n\tCMatrix CoordinateRoll;\n\n\tdouble angle;\npublic:\n\tKalmanFilter();\n\tKalmanFilter(camera_obj &single_obj,double rollAngle);\n\n\t~KalmanFilter(void);\n\n\tvoid InitialKalmanFilter(camera_obj &single_obj);\n\n\tvoid timeUpdate();\n\n\tvoid stateUpdate(camera_obj &single_obj,double rollangle,double *pos);\n\n\tObjectState GetCurrentState();\n\n\tObjectState GetPredictState();\n\n\tMeasurement GetPredictMeasurement(double rollAngle,double *pos);\n\n\tCMatrix CalMeasureDeviation();\n\n\tKalmanFilter& operator = (const KalmanFilter& anotherKF);\n\n\tvoid CalCoorRoll(double angle);\n\n\n};\n\n\n\n"
},
{
"alpha_fraction": 0.5208004117012024,
"alphanum_fraction": 0.5387045741081238,
"avg_line_length": 16.747663497924805,
"blob_id": "73b22a9577b1847268dec23afa06702960e9bc68",
"content_id": "9c52be6250d56fe6cbad0437bd8a2ce00414c4aa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2877,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 107,
"path": "/athena/core/x86/Planning/include/common/rect.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 表示一个矩形栅格的属性和相关计算。\n */\n\n#pragma once\n\n#include <math.h>\n#include <vector>\n#include \"point.h\"\n\n/**\n * @class rect\n * @brief 矩形栅格类。\n */\nclass rect\n{\npublic:\n float width; ///<矩形栅格的宽度\n float height; ///<矩形栅格的长度\n float x,y; ///< 左上角点坐标left-top\n point p1, p2, p3, p4; ///<矩形栅格的四个顶点\n\npublic:\n /**\n * @brief 构造函数\n */\n rect()\n {\n x=y=0;\n width=height=0;\n }\n /**\n * @brief 析构函数\n */\n ~rect()\n {\n\n }\n\n /**\n * @brief 绝对坐标系下,判断一个点是否落在该矩形栅格内。\n * @param p 输入量:点的坐标(绝对坐标系坐标)。\n * @return 1表示点落在矩形栅格内,0表示没有。\n */\n bool is_point_in(point& p);\n\n /**\n * @brief 传感器相对坐标系下,判断一个点是否落在该矩形栅格内。\n * @param p 输入量:点的坐标(绝对坐标系坐标)。\n * @return 1表示点落在矩形栅格内,0表示没有。\n */\n bool is_point_in_xx(point& p);\n\n /**\n * @brief 绝对坐标系下,判断一个矩形栅格是否与该矩形栅格有交叠。\n * @param r 输入量:输入的矩形栅格(绝对坐标系坐标)。\n * @return 1表示有交叠,0表示没有。\n */\n bool cross_rect(rect r);\n\n /**\n * @brief 传感器相对坐标系下,判断一个矩形栅格是否与该矩形栅格有交叠。\n * @param r 输入量:输入的矩形栅格(绝对坐标系坐标)。\n * @return 1表示有交叠,0表示没有。\n */\n bool cross_rect_xx(rect r);\n\n /**\n * @brief 对等于号=进行重载,rect类的等号操作符。\n * @param src 输入量:原始输入矩形栅格。\n * @return 被赋值的矩形栅格。。\n */\n rect& operator= (const rect& src)\n {\n\n this->p1 = src.p1;\n this->p2 = src.p2;\n this->p3 = src.p3;\n this->p4 = src.p4;\n\n this->width = src.width;\n this->height = src.height;\n this->x = src.x;\n this->y = src.y;\n\n return *this;\n\n }\n};\n\n/**\n* @brief 绝对坐标系下,判断两个矩形栅格是否有交叠。\n* @param r1 输入量:第一个矩形栅格(绝对坐标系坐标)。\n* @param r2 输入量:第二个矩形栅格(绝对坐标系坐标)。\n* @return 1表示有交叠,0表示没有。\n*/\nbool cross_in_two_rect(rect& r1, rect& r2);\n\n/**\n* @brief 传感器相对坐标系下,判断两个矩形栅格是否有交叠。\n* @param r1 输入量:第一个矩形栅格(绝对坐标系坐标)。\n* @param r2 输入量:第二个矩形栅格(绝对坐标系坐标)。\n* @return 1表示有交叠,0表示没有。\n*/\nbool cross_in_two_rect_xx(rect& r1, rect& r2);\n"
},
{
"alpha_fraction": 0.5512688159942627,
"alphanum_fraction": 0.5586960911750793,
"avg_line_length": 19.27614974975586,
"blob_id": "310aacab5822880142369b598c445a7e66860f94",
"content_id": "b3c75ef37676f6970866543bdfa002e7295b3528",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6799,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 239,
"path": "/athena/core/arm/Planning/include/planning/route_data.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 导航模块的公用数据\n * 包含对点、规划模块数据等描述,地图切片、路口红绿灯、停车泊车点等信息\n */\n\n\n#ifndef _ROUTE_DATA_H\n#define _ROUTE_DATA_H\n\n#include <stdint.h>\n#include <string>\n#include <vector>\n\n#include \"common/rect.h\"\n\n/**\n* @class point_xys\n* @brief 形点边界线(专供planning使用)\n* 约束可行驶区域,道路边界等\n*/\nclass point_xys\n{\npublic:\n /**\n * @brief 构造函数\n */\n point_xys();\n /**\n * @brief 析构函数\n */\n ~point_xys();\n\n /**\n * @brief 对等于号=进行重载,point_xys类的等号操作符。\n * @param src 输入量:原始输入点。\n * @return 被赋值的point_xys信息。。\n */\n point_xys &operator=(const point_xys& src);\n\npublic:\n int8_t type; ///<点类型,依业务类型而定\n float x; ///<x坐标,单位m\n float y; ///<y坐标,单位m\n};\n\n/**\n* @class point_m\n* @brief 路径下发消息\n* 中路中心上的点(专供motion使用)\n*/\nclass point_m\n{\npublic:\n /**\n * @brief 构造函数\n */\n point_m();\n /**\n * @brief 析构函数\n */\n ~point_m();\n\n /**\n * @brief 对等于号=进行重载,point_m类的等号操作符。\n * @param src 输入量:原始输入点。\n * @return 被赋值的point_m信息。。\n */\n point_m &operator=(const point_m& src);\n\npublic:\n int8_t type; ///<点类型: KP_NONE=禁行点, KP_NORMAL=一般点, KP_CHANGE_LANE_LEFT=向左换道点, KP_CHANGE_LANE_RIGHT=向右换道点\n float x; ///<x坐标,单位m\n float y; ///<y坐标,单位m\n float yaw; ///<道路头指向(相对正北的夹角)\n float k; ///<道路曲率,单位deg/m\n float mileage; ///<相对起点的里程,单位m\n float width; ///<道路宽度,单位m\n int8_t sug_speed;///<推荐速度,单位km/h\n};\n\n/**\n* @class section_m\n* @brief 以左侧第一车道头指向的垂线截取的道路切片(专供motion使用)。\n*/\nclass section_m\n{\npublic:\n /**\n * @brief 构造函数\n */\n section_m();\n /**\n * @brief 析构函数\n */\n ~section_m();\n\n /**\n * @brief 对等于号=进行重载,section_m类的等号操作符。\n * @param src 输入量:原始输入的section_m信息。\n * @return 被赋值的section_m信息。。\n */\n section_m &operator=(const section_m& src);\n\npublic:\n int8_t num_of_lane; ///<切片上的道路数量\n std::vector< point_m > lane; ///<每个切片上的道路中点\n};\n\n/**\n* @class line_xys\n* @brief 形点边界线(专供planning使用)\n* 约束可行驶区域,道路边界等\n*/\nclass line_xys\n{\npublic:\n /**\n * @brief 构造函数\n */\n line_xys();\n /**\n * @brief 析构函数\n */\n ~line_xys();\n\n /**\n * @brief 
对等于号=进行重载,line_xys类的等号操作符。\n * @param src 输入量:原始输入的line_xys信息。\n * @return 被赋值的line_xys信息。\n */\n line_xys& operator= (const line_xys& src);\n\npublic:\n int32_t num_of_points; ///<点的数量\n std::vector<point_xys> line; ///<点集\n};\n\n\n/**\n* @class route_planning_m\n* @brief 路径规划(专供planning使用)\n* 包含导航规划所经过的车道序列以及关键点等\n*/\nclass route_planning_m\n{\npublic:\n /**\n * @brief 构造函数\n */\n route_planning_m();\n /**\n * @brief 析构函数\n */\n ~route_planning_m();\n\n /**\n * @brief 对等于号=进行重载,route_planning_m类的等号操作符。\n * @param src 输入量:原始输入的route信息。\n * @return 被赋值的route信息。。\n */\n route_planning_m &operator=(const route_planning_m& src);\n\n /**\n * @brief 清零所有属性\n */\n void route_clear();\n\n /**\n * @brief 判断一个点是否在可行驶区域内(两条边界线之间)。\n * @param src 输入量:待判断的点\n * @param _begin 输入量:边界的起始位置\n * @param _end 输入量:边界的终止位置\n * @return 返回1表示在边界线之内,0表示不在。\n */\n bool check_point_in_lane_edge( const point_xys src, const int _begin, const int _end );\n\n /**\n * @brief 判断一个矩形框是否在可行驶区域内(矩形框任意一个顶点在两条边界线之间)。\n * @param rt 输入量:待判断的矩形框\n * @param _begin 输入量:边界的起始位置\n * @param _end 输入量:边界的终止位置\n * @return 返回1表示在边界线之内,0表示不在。\n */\n bool check_rect_in_lane_edge( const rect rt, const int _begin, const int _end );\n\npublic:\n int32_t replan_flag; ///<重规划标志位\n //以车辆当前位置(cur_section)为原点,分段规划信息\n int64_t time_stamp; ///<产生此路径规划的时间,gettimeofday获得的毫秒数\n std::string destination; ///<规划的目的地\n int32_t route_reason; ///<规划原因\n float mileage_pass; ///<相对车过去的里程,单位m,默认500m\n float mileage_next; ///<相对车未来的里程,单位m,默认1000m\n float mileage_start; ///<距离起点的里程,单位m\n float mileage_stop; ///<距离终点的里程,单位m\n //可行驶区域 left_edge;right_edge;需要在地图中指明\n line_xys left_edge; ///<左边界线,超出此线可能撞马路牙子\n line_xys right_edge; ///<右边界线,超出此线可能撞马路牙子\n line_xys left_line; ///<最左车道的左边线,超出此线可能逆行\n line_xys right_line; ///<最右车道的右边线,通常right_line和right_edge之间为停车带\n //中线:按车辆行驶方向的每个切片的中点\n int32_t num_of_section; ///<这段道路上的切片数量\n std::vector< section_m > line; ///<切片列表\n int32_t cur_section; ///<规划时车在哪个切片上\n};\n\n/**\n* @class TrafficLights\n* 
@brief 路口红绿灯信息\n*/\nclass TrafficLights\n{\npublic:\n /**\n * @brief 构造函数\n */\n TrafficLights();\n /**\n * @brief 析构函数\n */\n ~TrafficLights();\n\n /**\n * @brief 对等于号=进行重载,TrafficLights类的等号操作符。\n * @param src 输入量:原始输入的红绿灯信息。\n * @return 被赋值的红绿灯信息。。\n */\n TrafficLights &operator=(const TrafficLights& src);\n\npublic:\n int64_t light_status_; ///<红绿灯状态,0:无效值,1:绿,2:黄,3:红\n\n int8_t crossing_status_; ///<路口状态。0:非路口,1:路口\n};\n\n\n#endif //_ROUTE_DATA_H\n\n"
},
{
"alpha_fraction": 0.701373815536499,
"alphanum_fraction": 0.7057121992111206,
"avg_line_length": 31.162790298461914,
"blob_id": "19bcc0664d00d4744957a6a2caa76907c6160f2b",
"content_id": "8da714d2a9bb5822ece8cab79b0b5c54363be2b4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1383,
"license_type": "no_license",
"max_line_length": 181,
"num_lines": 43,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerPoints.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include \"../utils/type.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerPoint.h\"\n\nclass LaneMarkerPoints\n{\nprivate:\n FlexArray<ptrLaneMarkerPoint>\t*_faLaneMarkerPoints;\n double _dAverageEdgeStrength;\npublic:\n LaneMarkerPoints(void);\n LaneMarkerPoints(LaneMarkerPoints *pSrc);\n ~LaneMarkerPoints(void);\n LaneMarkerPoint *getLaneMarkerPoint(int idx);\n void deleteLaneMarkerPoint(void);\n void deleteLaneMarkerPoint(int iIdx);\n int getLaneMarkerPointNumber(void);\n void addLaneMarkerPoint(int iIsrc, int iJsrc, double dEdgeStrength, double dGrandient, int iProcLineIndex, double dDisparity, double dX = 0.0, double dY = 0.0, double dZ = 0.0);\n void addLaneMarkerPoint(LaneMarkerPoint *pLMP);\n inline void remove_delete(int iIdx)\n {\n _faLaneMarkerPoints->remove_delete(iIdx);\n }\n inline void clear_reset(void)\n {\n _faLaneMarkerPoints->clear();\n _faLaneMarkerPoints->reset();\n }\n inline void reset(void)\n {\n _faLaneMarkerPoints->reset();\n }\n void set(int iIdx, LaneMarkerPoint *pLaneMarkerPoint);\n BOOL calcCentroid(double *pdCentroid);\n BOOL doPCA(double *pdAxis, double *pdVar);\n BOOL findMinMax(double *pdMinMax);\n inline double AverageEdgeStrength(void)\n {\n return _dAverageEdgeStrength;\n }\n void calcAverageEdgeStrength(BOOL bForce = FALSE);\n};\n"
},
{
"alpha_fraction": 0.5253623127937317,
"alphanum_fraction": 0.554347813129425,
"avg_line_length": 26.600000381469727,
"blob_id": "9031fdd08f0869f8f8d0eca9ee68f73c0dbd3565",
"content_id": "a6cc5d965d7f4c43a75d06c9b1a191705cfc4f93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1104,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 40,
"path": "/athena/cc/camera/lane_detect/main.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <dlfcn.h>\n#include <stdlib.h>\n#include <iostream>\n#include \"lane_utils.h\"\n\nusing namespace std;\nusing namespace cv;\n\nint main()\n{\n\tinit();\n\tcv::Mat Input(720, 1280, CV_8UC3);\n\tcv::Mat Output(720, 1280, CV_8UC3);\n const int bufSize = 256;\n char Base_name[bufSize];\n string file_path = \"/home/zombie/dcw/gj_new/gjphoto\";\n cout << \"input the first image i:\" << endl;\n int picture_ID = 1;\n while (cin >> picture_ID)\n {\n for (; picture_ID < 20000; picture_ID++)\n {\n sprintf(Base_name, \"%06d.png\", picture_ID);\n string image_path = file_path + \"/\" + Base_name;\n //cout << image_path << endl;\n string image_path_out = file_path + \"/\" + Base_name;\n Input = cv::imread(image_path, 1);\n setImage(Input);\n Output = getResultImage();\n imshow(\"out\", Output);\n cvWaitKey(1);\n\n int lane_quality;\n vector<point> leftPoints, rightPoints;\n getLaneInfos(&lane_quality, leftPoints, rightPoints);\n }\n }\n return 0;\n}\n"
},
{
"alpha_fraction": 0.7865992784500122,
"alphanum_fraction": 0.7939372062683105,
"avg_line_length": 51.27043914794922,
"blob_id": "8be571367e9ae68b485cff0bbc80d2fdd8284520",
"content_id": "6510966bea4c7185e176f7bd297086888d50a824",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 8379,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 159,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneDetectorTools.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "BIG5",
"text": "#pragma once\n#include \"LaneMarkerPoints.h\"\n#include \"../utils/tmc_stereobmp-forMono.h\"\n#include \"LaneParameterEstimator.h\"\n\nextern double getNEAR_BASEY(void);\nextern double getNEARHEIGHT(void);\nextern double getNEAR_TOPY(void);\nextern double getFAR_BASEY(void);\nextern double getFARHEIGHT(void);\nextern double getFAR_TOPY(void);\nextern double getFARAREA_TOPY(void);\n\nextern void setNEAR_BASEY(double v);\nextern void setNEARHEIGHT(double v);\nextern void setNEAR_TOPY(double v);\nextern void setFAR_BASEY(double v);\nextern void setFARHEIGHT(double v);\nextern void setFAR_TOPY(double v);\nextern void setFARAREA_TOPY(double v);\n\nextern int getPositiveThresholdDefault(void);\nextern int getNegativeThresholdDefault(void);\nextern void setPositiveThresholdDefault(int iV);\nextern void setNegativeThresholdDefault(int iV);\nextern int getLaneBoundaryPointsNumberForParamameterEstimation(void);\nextern void setLaneBoundaryPointsNumberForParamameterEstimation(int iV);\n\nextern double calcZvehicleFromIsrc(PARAM_CAM *pCamParam, int iIsrc);\nextern double calcZvehicleFromIsrcByNearAreaParameter(PARAM_CAM *pCamParam, int iIsrc);\nextern int calcHorizontalLineOfDepth(PARAM_CAM *pCamParam, double dZroad);\nextern int calcHorizontalLine(PARAM_CAM *pCamParam);\nextern double calcDepthOfHorizontalLine(PARAM_CAM *pCamParam, int iIsrc);\nextern double getWidthOfPixel(PARAM_CAM *pCamParam, int iIsrc);\nextern double getWidthOfPixelByNearAreaParameter(PARAM_CAM *pCamParam, int iIsrc);\nextern BOOL detectLaneMarkerPoints(PARAM_CAM *pCamParam, Uchar *pImage, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints);\n\nextern BOOL calc3DPlane(LaneMarkerPoints *pLMPs, double *pa, double *pc, double *pd);\nextern BOOL calc3DPlaneThroughOrigin(LaneMarkerPoints *pLMPs, double *pa, double *pc, double *pd);\nextern BOOL calc3DSurface(LaneMarkerPoints *pLMPs, double *pdalpha, double *pdcv, double *pdgamma, double *pdd);\nextern BOOL 
calc3DSurfaceWithoutGamma(LaneMarkerPoints *pLMPs, double *pdalpha, double *pdcv, double *pdd);\nextern BOOL judgeLaneMarkerPointAboveGround(PARAM_CAM *pCamParam, double *pdPlane3D, double dTh, LaneMarkerPoints *pLaneMarkerPoints);\n\nextern BOOL calcPosImageFromPosRoad(PARAM_CAM *pCamParam, int a_iIroad, int a_iJroad, double *a_pdIimage, double *a_pdJimage);\nextern BOOL calcPosImageFromPosRoad(PARAM_CAM *pCamParam, double *pdPlane3D, int a_iIroad, int a_iJroad, double *a_pdIimage, double *a_pdJimage);\nextern BOOL calcPosRoadFromPosImage(PARAM_CAM *pCamParam, int a_iIimage, int a_iJimage, double *a_pdIRoad, double *a_pdJRoad);\nextern double calcIimageFromUimage(PARAM_CAM *pCamParam, DB dUimage);\nextern double calcJimageFromVimage(PARAM_CAM *pCamParam, DB dVimage);\n\nextern BOOL calcProjectionTo3DPlane(PARAM_CAM *pCamParam, double *pdPlane3D, double *pdSrc, double *pdDst);\nextern BOOL calcPosImageFromPosVehicle(PARAM_CAM *pCamParam, double dXVehicle, double dYVehicle, double dZVehicle, int *piIsrc, int *piJsrc);\n\n\nvoid transformInputToRoad(PARAM_CAM *pCamParam, int iIsrc, int iJsrc, double *pdZVehicle, double *pdXVehicle);\n\n//////////////////////////////////////////////////////////////////////////\nconst int LR_NUM\t=\t2;\nconst int LR_LEFT\t=\t0;\nconst int LR_RIGHT\t=\t1;\n\nconst int NF_NUM\t=\t2;\nconst int NF_NEAR\t=\t0;\nconst int NF_FAR\t=\t1;\n\nconst int UD_NUM\t=\t2;\nconst int UD_UP\t\t=\t0;\nconst int UD_DOWN\t=\t1;\n\nconst int LBT_NONE\t\t\t\t\t= 0;\nconst int LBT_LANEMARKERLINE\t\t= 1;\nconst int LBT_LANEMARKER\t\t\t= 2;\nconst int LBT_LANEMARKERLINE_TRACK\t= 3;\nconst int LBT_LANEMARKER_TRACK\t\t= 4;\n\nconst int LBT_SOLID\t=\t1;\nconst int LBT_DASH\t=\t2;\nconst int LBT_DOT\t=\t3;\n\nconst int LBT_UP\t=\t1;\nconst int LBT_DOWN\t=\t2;\n\nconst int LBT_FLAT\t=\t1;\nconst int LBT_STEP\t=\t2;\n\nconst int LPT_NONE\t=\t0;\nconst int LPT_BOTHSIDE\t=\t1;\n\nint calcIsrcFromUimage(PARAM_CAM *pCamParam, double dUimage);\ndouble 
calcUimageFromIsrc(PARAM_CAM *pCamParam, int iIsrc);\nint calcJsrcFromVimage(PARAM_CAM *pCamParam, double dVimage);\ndouble calcVimageFromJsrc(PARAM_CAM *pCamParam, int iJsrc);\ndouble calcZvehicleFromIsrc(PARAM_CAM *pCamParam, int iIsrc);\ndouble calcZvehicleFromIsrcByNearAreaParameter(PARAM_CAM *pCamParam, int iIsrc);\ndouble calcXcameraFromXvehicle(PARAM_CAM *pCamParam, double dXvehicle);\ndouble calcXcameraFromXvehicleByNearAreaParameter(PARAM_CAM *pCamParam, double dXvehicle);\ndouble calcXvehicleFromXcamera(PARAM_CAM *pCamParam, double dXcamera);\ndouble calcXvehicleFromXcameraByNearAreaParameter(PARAM_CAM *pCamParam, double dXcamera);\ndouble calcYcameraFromYvehicle(PARAM_CAM *pCamParam, double dYvehicle);\ndouble calcYvehicleFromYcamera(PARAM_CAM *pCamParam, double dYcamera);\ndouble calcZcameraFromZvehicle(PARAM_CAM *pCamParam, double dZVehicle);\ndouble calcZcameraFromZvehicleByNearAreaParameter(PARAM_CAM *pCamParam, double dZVehicle);\ndouble calcZvehicleFromZcamera(PARAM_CAM *pCamParam, double dZcamera);\ndouble calcUimageFromZcameraOnRoad(PARAM_CAM *pCamParam, double dZcamera);\ndouble calcUimageOfHorizontalLine(PARAM_CAM *pCamParam);\ndouble calcUimageFromYcamera(PARAM_CAM *pCamParam, double dYcamera);\ndouble calcYcameraFromUimage(PARAM_CAM *pCamParam, double dUimage);\ndouble calcYcameraFromUimageByNearAreaParameter(PARAM_CAM *pCamParam, double dUimage);\ndouble calcZcameraFromYcameraOnRoad(PARAM_CAM *pCamParam, double dYcamera);\ndouble calcZcameraFromYcameraOnRoadByNearAreaParameter(PARAM_CAM *pCamParam, double dYcamera);\ndouble calcXcameraFromVimage(PARAM_CAM *pCamParam, double dVimage);\ndouble calcVimageFromXcamera(PARAM_CAM *pCamParam, double dXcamera);\ndouble calcVimageFromXcameraAndZcameraOnRoad(PARAM_CAM *pCamParam, double dXcameraRoad, double dZcameraRoad);\ndouble calcVimageFromXcameraAndZcameraOnRoadByNearAreaParameter(PARAM_CAM *pCamParam, double dXcameraRoad, double dZcameraRoad);\nint calcHorizontalLineOfDepth(PARAM_CAM *pCamParam, 
double dZvehicle);\nint calcHorizontalLine(PARAM_CAM *pCamParam);\ndouble calcDepthOfHorizontalLine(PARAM_CAM *pCamParam, int iIsrc);\ndouble getWidthOfPixel(PARAM_CAM *pCamParam, int iIsrc);\ndouble getWidthOfPixelByNearAreaParameter(PARAM_CAM *pCamParam, int iIsrc);\n\n//////////////////////////// for topview image //////////////////////////\n#define\tROAD_TOP_Y\t50000\t//(100000)\t// ?H每那㏑?&?㏑※??*[[mm]\n#define\tROAD_LEFT_X\t(-10000)\t\t\t// ?H每那㏑?&???*[[mm]\n#define\tROAD_REAL_WIDTH\t(20000)\t\t\t// ?H每那㏑?&???[mm]\n#define\tROAD_REAL_HEIGHT\tROAD_TOP_Y\t// ?H每那㏑?&???[mm]\n\n#define\tROAD_IMAGE_WIDTH\t480//(192)\t// ?H每那㏑?&?㏑?㏑?&f[pix]\n#define\tROAD_IMAGE_HEIGHT\t(960)\t\t// ?H每那㏑?&??c㏑?&f[pix]\n\nstatic double g_dRoadTopY\t= \tROAD_TOP_Y;\nstatic double g_dRoadLeftX\t=\tROAD_LEFT_X;\nstatic double g_dRoadRealWidth\t=\tROAD_REAL_WIDTH;\nstatic double g_dRoadRealHeight\t=\tROAD_REAL_HEIGHT;\n\nstatic int g_iRoadImageWidth\t=\tROAD_IMAGE_WIDTH;\nstatic int g_iRoadImageHeight\t=\tROAD_IMAGE_HEIGHT;\n\ndouble getRoadTopY(void);\ndouble getRoadLeftX(void);\ndouble getRoadRealWidth(void);\ndouble getRoadRealHeight(void);\nint getRoadImageWidth(void);\nint getRoadImageHeight(void);\ndouble calcZvehicleFromIroad(int iIroad);\ndouble calcXvehicleFromJroad(int a_iJroad);\nint calcIroadFromZvehicle(double dZVehicle);\nint calcJroadFromXvehicle(double dXVehicle);\ndouble calcUimageFromZvehicleOnRoad(PARAM_CAM *pCamParam, DB dZVehicle);\t// Zvehicle->Uimage\ndouble calcZvehicleFromUimage(PARAM_CAM *pCamParam, double dUimage);\t// Uimage->Zvehicle\ndouble calcVimageFromXvehicleAndZvehicleOnRoad(PARAM_CAM *pCamParam, DB dXvehicleOnRoad, DB dZvehicleOnRoad);\ndouble calcVimageFromXvehicleAndZvehicleOnRoadByNearAreaParameter(PARAM_CAM *pCamParam, DB dXvehicleOnRoad, DB dZvehicleOnRoad);\nBOOL calcPosImageFromPosVehicle(PARAM_CAM *pCamParam, double dXvehicle, double dYvehicle, double dZvehicle, int *piIsrc, int *piJsrc);\ndouble calcXcameraOnRoadFromXcameraAndZcamera(PARAM_CAM 
*pCamParam, double dXcamera, double dZcamera);\ndouble calcXvehicleFromVimageAndZvehicle(PARAM_CAM *pCamParam, DB dVimage, DB dZvehicle);\ndouble calcIimageFromUimage(PARAM_CAM *pCamParam, double dUimage);\ndouble calcJimageFromVimage(PARAM_CAM *pCamParam, DB dVimage);\ndouble calcUimageFromIimage(PARAM_CAM *pCamParam, DB dIimage);\ndouble calcVimageFromJimage(PARAM_CAM *pCamParam, DB dJimage);\nBOOL calcPosImageFromPosRoad(PARAM_CAM *pCamParam, int iIroad, int iJroad, double *pdIimage, double *pdJimage);\nBOOL calcPosRoadFromPosImage(PARAM_CAM *pCamParam, int iIimage, int iJimage, double *pdIRoad, double *pdJRoad);\n\n\n"
},
{
"alpha_fraction": 0.6659995317459106,
"alphanum_fraction": 0.7427173852920532,
"avg_line_length": 35.96164321899414,
"blob_id": "3a22c404c3536052ae4df1171266c47918fd8098",
"content_id": "2269e6b223b102a39652745745bcde9a9e421e47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 16147,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 365,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/config.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#ifndef _CONFIG_H_\n#define _CONFIG_H_\n\n//#define\tFOR_FC2\n//#define\tFOR_TMC\n//#define\tKATAOKA_LOGIC\n\n#define\tUSE_CALC_PROCLINE\n#define\tNOT_USE_DISPARITY\n//#define\tFORCE_UseDisparity\n//#define\tNOT_USE_ADJUST_EDGE_THRESHOLD\n#define\tNOT_USE_FARAREA\n//#define\tSIMPLE_CALC_EDGE_STRENGTH\n//#define\tNOT_USE_selectLaneMarkerAtComplexLaneBoundary\n//#define\tUSE_checkMismatchOfLaneMarkerPointsFromLaneParameter\n//#define\tUSE_BB_DIFF\n#define\tUSE_NON_DISPARITY_EDGEPOINT\n//#define\tNOT_USE_CONSTRANT_OF_GAP_BETWEEN_EDGE_POINT\n#define\tNOT_USE_GRIDMAP1D\n\n//#define\tFOR_WTP\n/////////////////////////////////////////////////////////////////////////////////\n#define\tNOT_USE_TMC_STEREO_BMP\n//#define\tUSE_TYTLABS_STEREO_AVI\n//#define\tFOR_DEMONSTRATION1\n//#define\tFOR_DEMONSTRATION2\n//#define\tLONG_FOCAL_LENGTH\n\n//#define\tFOR_TME\n//#define\tWINDOWS_DEBUG_MODE\n//#define\tNOT_USE_ROAD_WINDOW\n//#define\tNOT_USE_CONTROL_WINDOW\n#define\tNOT_USE_CONSOLE\n#define\tNOT_USE_LISTWINDOW\n\n//#define\tOVERLAY_ROADIMAGE_ON_INPUTIMAGE\n//#define\tUSE_DYNAMIC_SYSTEM_NOISE\n#define\tPROCESS_PRIORITY\t5\n\n#define\tSCALE_TO_DRAW_POINT\t1\n\n//#define\tLONG_FOCAL_LENGTH\n\n#define\tSAFE_DELETE(p)\t{\tif(p != NULL)\t{\tdelete p;\tp = NULL;\t}\t}\n#define\tSAFE_DELETE_ARRAY(p)\t{\tif(p != NULL)\t{\tdelete [] p;\tp = NULL;\t}\t}\n\n#define\tGM_MOVINGAVERAGE_NUMBER\t10\n#define\tGM_MOVINGAVERAGE_INVALID_VALUE\t(1000000)\n\n\n#define\tACC_FAC\t\t100\n#define\tNO_STEER_SIGNAL\n#define FIND_WIDE_LANE\n\n#ifndef\tM_PI\n#define\tM_PI\t(3.141592)\n#endif\n\n\n//yuhuan 10.19\n//#define\tDB_REAL_FILTER_LENGTH\t(80)\n#define\tDB_REAL_FILTER_LENGTH\t(80) //边沿检测滤波长度\n\n// 边缘梯度能量阈值\n#if\tdefined(FOR_DEMONSTRATION1) || defined(FOR_DEMONSTRATION2)\n#define\tCS4_POSITIVETHRESHOLD\t(S4)64\n#define\tCS4_NEGATIVETHRESHOLD\t(S4)(-(CS4_POSITIVETHRESHOLD))\t// upper boundary of down edge 
strength\n#elif\tdefined(FOR_TME)\n#define\tCS4_POSITIVETHRESHOLD\t(S4)64\n#define\tCS4_NEGATIVETHRESHOLD\t(S4)(-(CS4_POSITIVETHRESHOLD))\t// upper boundary of down edge strength\n#else\tFOR_DEMONSTRATION\n#define\tCS4_POSITIVETHRESHOLD\t(S4)16\n#define\tCS4_NEGATIVETHRESHOLD\t(S4)(-(CS4_POSITIVETHRESHOLD))\n#endif\tFOR_DEMONSTRATION\n\n#define\tCS4_NEAR_BASEY\t7000\n#define\tCS4_NEARHEIGHT\t10000\t //(CS4_NEAR_TOPY - CS4_NEAR_BASEY)\n#define\tCS4_NEAR_TOPY\t(CS4_NEAR_BASEY + CS4_NEARHEIGHT)\t//17000\n#define\tCS4_NEARIMAGEHEIGHT\t\t100 //近处图像高度\n#define\tCS4_NEARWIDTH\t\t\t50000 //实际道路横向宽度\n#define\tCS4_NEARIMAGEWIDTH\t\t962 //近处图像宽度\n//#define\tCS4_NEAR_MAX_YAW\t22//50\n//#define\tCS4_NEAR_MAX_YAW\t44\t// 20100831\n\n#if\t!defined(FOR_DEMONSTRATION2)\n#define\tCS4_NEAR_MAX_YAW\t88\t// 20111107\n#else\t!defined(FOR_DEMONSTRATION2)\n#define\tCS4_NEAR_MAX_YAW\t44\t// 20111107\n#endif\t!defined(FOR_DEMONSTRATION2)\n\n#define\tCS4_NUMBER_OF_NEAR_YAW\t\t(CS4_NEAR_MAX_YAW * 2 + 1)\n#define\tCS4_NUMBER_OF_NEAR_OFFSET\tCS4_NEARIMAGEWIDTH\n#define\tCS4_NUNBER_OF_NEAR_BALLOT\t256\n\n#define\tCS4_FAR_BASEY\tCS4_NEAR_TOPY\t//17000\n#define\tCS4_FARHEIGHT\tCS4_NEARHEIGHT\t//10000\t//(CS4_FAR_TOPY - CS4_FAR_BASEY)\n\n#define\tCS4_FAR_TOPY\t(CS4_FAR_BASEY + CS4_FARHEIGHT)\t//27000\n#define\tCS4_FARIMAGEHEIGHT\t\tCS4_NEARIMAGEHEIGHT\t//100\n#define\tCS4_FARWIDTH\t\t\tCS4_NEARWIDTH\t//10000\n#define\tCS4_FARIMAGEWIDTH\t\tCS4_NEARIMAGEWIDTH\t//192\n\n#define\tCS4_FARAREA_TOPY\t80000\n#define\tCS4_WEIGHT_MAX\t1\n\n#define\tCS4_MINIMUM_VOTES\t\t(CS4_WEIGHT_MAX * 30)\t// 20110630\n#define\tVOTING_THRESHOLD_RATIO\t5\n\n/////////////////////////////////\n//#define\tCS4_MAXIMUM_LINEWIDTH\t500\n#define\tCS4_MAXIMUM_LINEWIDTH\t1000\t// 2011124\n//TODO 100\n#define\tCS4_NEAR_MINIMUM_LINEWIDTH\t80\t// 20110704\n\n#define\tV_V_M\t1\n#define\tFALSE\t0\n#define\tTRUE\t1\n\n#define 
CS4_NOISE\t1\n\n#define\tDB_OFFSET_MAX\t\t\t(2000)\n#define\tDB_OFFSET_MIN\t\t\t(-2000)\n#define\tDB_YAW_MAX\t\t\t(45.0 / 180 * M_PI)\n#define\tDB_YAW_MIN\t\t\t(-DB_YAW_MAX)\n#define\tDB_CURVATURE_MAX\t\t(5e-5)\n#define\tDB_CURVATURE_MIN\t\t(-5e-5)\n\n#ifdef FIND_WIDE_LANE\n#define\tDB_LW_MAX\t\t\t(8000)\n#else FIND_WIDE_LANE\n#define\tDB_LW_MAX\t\t\t(4000)\n#endif FIND_WIDE_LANE\n\n#define\tDB_LW_MIN\t\t\t(2000)\n\n#define\tDB_PITCH_MAX\t(5.0 / 180. * M_PI)\n#define\tDB_PITCH_MIN\t(-DB_PITCH_MAX)\n\n#define\tCDB_OFFSET_SCALE\t(1.)\n#define\tCDB_YAW_SCALE\t\t(1.)\n#define\tCDB_C_SCALE\t\t\t(1.)\n#define\tCDB_LW_SCALE\t\t(1.)\n#define\tCDB_DP_SCALE\t\t(1.)\n\n#define\tCS4_LW_LSB\t(1.)\n#define\tCS4_DP_LSB\t(10000.)\n#define\tCS4_CURVATURE_LSB\t(100000. * 1000)\n\n\n// 20110928\n#define\tDEFAULT_SEARCH_MARGIN_IN_NEAR_AREA\t(300.)\n#define\tDEFAULT_SEARCH_MARGIN_IN_FAR_AREA\t(300.)\n//#define\tDEFAULT_MAX_DIFF_FROM_MEDIAN_DEVIATION_IN_NEAR_AREA (100.)\n//#define\tDEFAULT_MAX_DIFF_FROM_MEDIAN_DEVIATION_IN_NEAR_AREA (50.)\t// 20111007\n//#define\tDEFAULT_MAX_DIFF_FROM_MEDIAN_DEVIATION_IN_FAR_AREA\t(100.)\n#define\tDEFAULT_MAX_DIFF_FROM_MEDIAN_DEVIATION_IN_NEAR_AREA (50.)\t// 20111121\n#define\tDEFAULT_MAX_DIFF_FROM_MEDIAN_DEVIATION_IN_FAR_AREA\t(50.)\t// 20111121\n\n#define\tDB_OFFSET_AVE\t\t(0)\n#define\tDB_YAW_AVE\t\t\t(0)\n#define\tDB_CURVATURE_AVE\t(0)\n#define\tDB_LW_AVE\t\t\t(3500)\n#define\tDB_DP_AVE\t\t\t(0)\n\n#define\tCDB_SIGMA_PSI\t(0.0)\t// 20111026\n#define\tCDB_SIGMA_PSIDASH\t(0.5 / 180 * M_PI)\n\n#define\tCDB_SIGMA_E\t\t\t(0.0)// [mm]\t// 20111026\n#define\tCDB_SIGMA_EDASH\t(1.0 * 1000)\t// [mm/s]\n\n#define\tCDB_SIGMA_RHO\t(0.0)\t// [1/mm]\t// 20111026\n//#define\tCDB_SIGMA_RHODASH\t(1.0e-6 * 1.0e-6)\t// [1/mm^2]// 20111025\n#define\tCDB_SIGMA_RHODASH\t(1.0e-5 * 1.0e-6)\t// [1/mm^2]// 20111026\n\n#define\tCDB_SIGMA_DELTAPHI\t(1.0 / 180 * M_PI)\n#define\tCDB_SIGMA_WIDTH\t(1.0 * 1000)\t// [mm]\n\n// 
configuration\n#define\tMINIMUM_CV\t(0.0001)\n#define\tMINIMUM_AVERAGE_EDGE_STRENGTH_RATIO\t(0.5)\n//#define\tMAXMIMUM_DIFF_OF_ENDPOINT\t(5000.)\n#define\tMAXMIMUM_DIFF_OF_ENDPOINT\t(1000.)\n//#define\tMINIMUM_RATIO_OF_POINTNUM\t(0.5)\n//#define\tMINIMUM_RATIO_OF_POINTNUM\t(0.8)\t// 20111121\n#define\tMINIMUM_RATIO_OF_POINTNUM\t(0.7)\n#define\tMINIMUM_RATIO_OF_VOTES\t(0.5)\n#define\tMINIMUM_FILTER_LENGTH\t(2)\n#define\tMAXIMUM_YAW_OF_LANEMARKERLINESEQUENCE\t(20)\n//TODO 3. / 8\n#define\tMINIMUM_OFFSET_FOR_LANEMARKERLINESEQUENCE_INHIBIT\t(4. / 8)\n//TODO 6. / 8\n#define\tMAXIMUM_OFFSET_FOR_LANEMARKERLINESEQUENCE_INHIBIT\t(5. / 8)\n//TODO 50\n#define\tMAXIMUM_HORIZONTAL_DIFF_AT_REGION_BOUNDARY\t(30.)\n//TODO 5\n#define\tMAXIMUM_YAW_DIFF_AT_REGION_BOUNDARY\t(50)\n#define\tOFFSETMARGIN_FOR_LANEMARKERLINE_3D\t5\n#define\tSEARCH_MARGIN_FOR_LANEBOUNDARY\t(500.)\n///////#define\tSEARCH_MARGIN_FOR_YAW\t(20)\n\n//#define\tINSIDE_OF_LANEBOUNDARY_FOR_LANE_BOUNDARY\t(1000.)\t//(500.)\t//20111103(枹巊梡)\n#define\tOUTSIDE_OF_LANEBOUNDARY_FOR_LANE_BOUNDARY\t(1500.)\n//#define\tMARGIN_OFFSET_FOR_LANE_BOUNDARY_TRACKING\t(500.)\n//TODO 250\n#define\tMARGIN_FOR_SEARCH_LANEMARKER_WITH_INITIAL_PARAMETER\t(200.)\n\n//#define\tMAXIMIM_YAW_DIFF_AT_REGION_BOUNDARY\t(10)\n\n#define OFFSET_SEARCH_MARGIN_SEQUENCE\t(200.)\t\t\t\t// 嬤朤幵慄嫬奅偲墦曽幵慄嫬奅偺椞堟嫬奅偱偺偢傟偺嫋梕抣\t// 20110703\n#define YAW_SEARCH_MARGIN_SEQUENCE\t(3.0 / 180. * M_PI)\t\t\t\t// 嬤朤幵慄嫬奅偲墦曽幵慄嫬奅偺椞堟嫬奅偱偺偢傟偺嫋梕抣\t// 20110703\n//#define OFFSET_SEARCH_MARGIN_TRACK\t(1000.)\t\t\t\t// 嬤朤幵慄嫬奅偲墦曽幵慄嫬奅偺椞堟嫬奅偱偺偢傟偺嫋梕抣\t// 20110703\n#define OFFSET_SEARCH_MARGIN_TRACK\t(200.)\t\t\t\t// 嬤朤幵慄嫬奅偲墦曽幵慄嫬奅偺椞堟嫬奅偱偺偢傟偺嫋梕抣\t// 20111129\n#define YAW_SEARCH_MARGIN_TRACK\t(1.0 / 180. 
* M_PI)\t\t\t\t// 嬤朤幵慄嫬奅偲墦曽幵慄嫬奅偺椞堟嫬奅偱偺偢傟偺嫋梕抣\t// 20110703\n\n//#define\tMAXIMUM_DIFF_FROM_PREVIOUS_FOUND_I\t(10)\t// 婛専弌揰偲偺悅捈曽岦偺嵟戝僊儍僢僾(巄掕)\n//#define\tMAXIMUM_DIFF_FROM_PREVIOUS_FOUND_J\t(10)\t// 婛専弌揰偲偺悈暯曽岦偺嵟戝僊儍僢僾(巄掕)\n#define\tMAXIMUM_DIFF_FROM_PREVIOUS_FOUND_I\t(100)\t// 婛専弌揰偲偺悅捈曽岦偺嵟戝僊儍僢僾(巄掕)// 20120416\n#define\tMAXIMUM_DIFF_FROM_PREVIOUS_FOUND_J\t(100)\t// 婛専弌揰偲偺悈暯曽岦偺嵟戝僊儍僢僾(巄掕)// 20120416\n\n#define\tMAXIMUM_DIFF_FROM_PREVIOUS_FOUND_Z\t(15.0 * 1000)\t// 20111121// 婛専弌揰偲偺倅曽岦偺嵟戝僊儍僢僾(巄掕)\n#define\tMAXIMUM_DIFF_FROM_PREVIOUS_FOUND_X\t(0.5 * 1000)\t// 20120118// 婛専弌揰偲偺倃曽岦偺嵟戝僊儍僢僾(巄掕)\n//#define\tMAXIMUM_DIFF_FROM_PREVIOUS_FOUND_X\t(2.0 * 1000)\t// 20120124\n\n////#define\tDB_OFFSET_MARGIN_OUTSIDE\tOUTSIDE_OF_LANEBOUNDARY_FOR_LANE_BOUNDARY\t// 20111103//枹巊梡\n////#define\tDB_OFFSET_MARGIN_INSIDE\t\tINSIDE_OF_LANEBOUNDARY_FOR_LANE_BOUNDARY\t// 20111103//枹巊梡\n\n\n#define\tMAX_DISTANCE_OF_FIRST_DETECTED_POINT\t(50000.)\t// 墦曽敀慄揰扵嶕帪偵巒傔偰尒偮偐偭偨揰偺嵟彫嫋梕嫍棧(墦偡偓傞揰偰弶傔偰尒偮偐偭偨応崌偼拞巭)\n#define\tMAXIMUM_RATIO_OF_REJECTED_POINTS\t(0.5)\t// 攋婞偝傟偨幵慄嫬奅揰偺妱崌偺嵟戝抣(巄掕)\n\n\n#define\tMINIMUM_SEARCHMARGIN_IN_PIXEL\t10\t// 悇掕埵抲傪拞怱偲偡傞扵嶕斖埻偺嵟彫暆[pix]\t// 20111109\n\n\n//#define\tEDGETHRESHOLD_FACTOR_FOR_LANEMARKER\t6\n//#define\tEDGETHRESHOLD_FACTOR_FOR_LANEMARKER\t8\t// 20111103\n//#define\tEDGETHRESHOLD_FACTOR_FOR_LANEMARKER\t6\t// 20111107\n//#define\tEDGETHRESHOLD_FACTOR_FOR_LANEMARKER\t2\t// 20111109\n#define\tEDGETHRESHOLD_FACTOR_FOR_LANEMARKER\t1\t// 20111124\t// 敀慄岓曗専弌帪偼丆暯嬒僄僢僕嫮搙偺鑷抣傪僄僢僕揰専弌帪偺鑷抣傛傝崅傔偵愝掕偡傞\n#define\tCURB_MARGIN\t300\t//[mm]\t// 棫懱幵慄嫬奅扵嶕斖埻偺幵慄嫬奅敀慄偐傜偺嫍棧[mm]\n#define\tEDGESTRENGTH_RATIO_TO_ELIMINATE\t0.5\t// 20110921// 暯嬒僄僢僕嫮搙偲斾妑偟庛偄僄僢僕揰傪嶍彍\n#define\tMAX_SECOND_VALUE\t(400.)\t// 枹巊梡\n\n#define\tLANE_BOUNDARY_POINTS_NUMBER_FOR_PARAMETER_ESTIMATION\t10\t// 僷儔儊乕僞悇掕偵梡偄傞幵慄嫬奅揰悢\n#define\tMINIMUM_RATIO_OF_EDGESTRENGTH_FOR_FARAREA_LANEBOUNDARY\t0.5\t// 墦曽敀慄揰扵嶕帪偵嫋梕偡傞嬤朤敀慄揰偺暯嬒僄僢僕嫮搙偲偺斾\n\n\n\n#ifndef\tFOR_TME\n#define\tCOUNT_FOR_FOUND_STATUS_LANE_SIDE\t5\t// 
敪尒仺捛愓堏峴僼儗乕儉悢(嵍塃)\n#define\tCOUNT_FOR_LOST_STATUS_LANE_SIDE\t\t3\t// 捛愓仺敪尒堏峴僼儗乕儉悢(嵍塃)\n//#define\tCOUNT_FOR_LOST_STATUS_LANE_SIDE\t\t10\t// 捛愓仺敪尒堏峴僼儗乕儉悢(嵍塃)\t20111003\n#define\tCOUNT_FOR_FOUND_STATUS_LANE_REGION\t5\t// 敪尒仺捛愓堏峴僼儗乕儉悢(嬤墦)\n#define\tCOUNT_FOR_LOST_STATUS_LANE_REGION\t3\t// 捛愓仺敪尒堏峴僼儗乕儉悢(嬤墦)\n//#define\tCOUNT_FOR_LOST_STATUS_LANE_REGION\t10\t// 捛愓仺敪尒堏峴僼儗乕儉悢(嬤墦)\t20111003\n#define\tLBT_LANEMARKER_FOUND_COUNT\t3\t// 敀慄敪尒仺捛愓堏峴僼儗乕儉悢\n#define\tLBT_LANEMARKERLINE_FOUND_COUNT\t3// 僄僢僕慄敪尒仺捛愓堏峴僼儗乕儉悢\n//#define\tLBT_LANEMARKERLINE_FOUND_COUNT\t5\t// 20111003\n#define\tLBT_LANEMARKER_LOST_COUNT\t3\t// 敀慄捛愓仺枹専弌堏峴僼儗乕儉悢\n#define\tLBT_LANEMARKERLINE_LOST_COUNT\t3\t// 僄僢僕慄捛愓仺枹専弌堏峴僼儗乕儉悢\n//#define\tLBT_LANEMARKERLINE_LOST_COUNT\t5\t// 20111003\n#else\tFOR_TME\n#define\tCOUNT_FOR_FOUND_STATUS_LANE_SIDE\t5\t// 敪尒仺捛愓堏峴僼儗乕儉悢(嵍塃)\n#define\tCOUNT_FOR_LOST_STATUS_LANE_SIDE\t\t10\t// 捛愓仺敪尒堏峴僼儗乕儉悢(嵍塃)\t20111003\n#define\tCOUNT_FOR_FOUND_STATUS_LANE_REGION\t5\t// 敪尒仺捛愓堏峴僼儗乕儉悢(嬤墦)\n#define\tCOUNT_FOR_LOST_STATUS_LANE_REGION\t10\t// 捛愓仺敪尒堏峴僼儗乕儉悢(嬤墦)\t20111003\n#define\tLBT_LANEMARKER_FOUND_COUNT\t3\n#define\tLBT_LANEMARKERLINE_FOUND_COUNT\t3\n#define\tLBT_LANEMARKER_LOST_COUNT\t10\n#define\tLBT_LANEMARKERLINE_LOST_COUNT\t10\n#endif\tFOR_TME\n\n\n\n\n#define\tCOUNT_FOR_SOLID_LINE\t2\t// 幚慄専弌敾掕僼儗乕儉悢\n#define\tCOUNT_FOR_DASH_LINE\t2\t// 攋慄専弌敾掕僼儗乕儉悢\n//#define\tCOUNT_FOR_DASH_LINE\t10\t// 攋慄専弌敾掕僼儗乕儉悢\t20111107\n#define\tMIN_RATIO_OF_SOLID_LINE_FOR_ONELINE_FILTER\t(0.7)\t// 20120127\t// 幚慄偲傒側偡堊偺張棟儔僀儞悢偵懳偡傞幵慄嫬奅揰悢偺斾偺嵟彫抣\n// TODO 0.25\n#define\tAVERAGE_EDGE_STRENGTH_AND_TH_RATIO\t(0.8)\t// 暯嬒僄僢僕嫮搙偵傛傞摦揑鑷抣愝掕帪偺僷儔儊乕僞\n\n//#define\tMISMATCH_OF_LANEMARKERPOINTS_FROM_LANEPARAMATER_PIXEL\t(10)\t// 幵慄嫬奅揰偲憱楬僷儔儊乕僞偵傛傞幵慄嫬奅偲偺偢傟偺嫋梕暆[pix]\n#define\tMISMATCH_OF_LANEMARKERPOINTS_FROM_LANEPARAMATER_PIXEL\t(20)\t// 幵慄嫬奅揰偲憱楬僷儔儊乕僞偵傛傞幵慄嫬奅偲偺偢傟偺嫋梕暆[pix]\t// 20111201\n#define\tMISMATCH_OF_LANEMARKERPOINTS_FROM_LANEPARAMETER_RATIO\t(0.5)\t// 幵慄嫬奅揰偲憱楬僷儔儊乕僞偵傛傞幵慄嫬奅偲偺偢傟検偺斾偺嫋梕検\n\n/// V moved at 20120313 
///////////////////////////////\n#define\tEXTENDED_HEIGHT_FOR_FAR_AREA\t20\t// 屌掕僺僢僠妏偱嶼弌偝傟傞墦曽椞堟偺嵟墦儔僀儞傪夋憸忋曽傊偢傜偟丆墦曽椞堟傪奼戝偡傞\n\n#define\tFACTOR_OF_LANEMARKER_WIDTH_FOR_FARSIDE\t2\t// 敀慄岓曗偺墦曽懁敀慄暆偺惂栺偺娚榓學悢\n\n#define\tFACTOR_FOR_AVERAGE_INTENSITY_OF_LANEMARKER\t2\t// 暯嬒婸搙偵娭偡傞敀慄忦審偺學悢\n\n#define\tLANEMARKERPAIR_MINIMUM_WIDTH\t(2500.)\t// 敀慄儁傾帪偺嫋梕嵟彫娫妘\n#define\tLANEMARKERPAIR_MAXIMUM_WIDTH\t(5000.)\t// 敀慄儁傾帪偺嫋梕嵟戝娫妘\n//#define\tLANEMARKERPAIR_MAXIMUM_YAWDIFF_DEG\t(10.)\t// 敀慄儁傾帪偺嫋梕嵟戝儓乕偢傟(屌掕僺僢僠妏)//10\n#define\tLANEMARKERPAIR_MAXIMUM_YAWDIFF_DEG\t(4.)//2014.9.25\n\n//#define\tMULTIDIRECTION_CALCEDGESTRENGTH\t\t// 巜掕曽岦(0,45,-45搙)偺僄僢僕嫮搙傪嶼弌\n\n#define\tFACTOR_FOR_FAR_END_OF_PROCLINE\t1 / 5\t// 夋憸忋晹偺僄僢僕揰専弌枹張棟椞堟\n\n#define\tINVALID_DISTANCE_VALUE\t(100. * 1000)\t// 柍岠側嫍棧偺抣[mm]\n\n//#define\tMAXIMUM_YAW_DIFF_IN_COMPLEX_LANE_MARKER\t(5)\n#define\tMAXIMUM_YAW_DIFF_IN_COMPLEX_LANE_MARKER\t(10)\t// 20111124\t// 暋崌慄偺暯峴慄孮偲傒側偡堊偺儓乕妏偺嵎偺嵟戝抣[pix]\n#define\tMAXIMUM_OFFSET_DIFF_IN_COMPLEX_LANE_MARKER\t(10)// 暋崌慄偺暯峴慄孮偲傒側偡堊偺僆僼僙僢僩偺嵎偺嵟戝抣[pix]\n#define\tMINIMUM_OFFSET_DIFF_IN_COMPLEX_LANE_MARKER\t(-3)// 暋崌慄偺暯峴慄孮偲傒側偡堊偺僆僼僙僢僩偺嵎偺嵟彫抣[pix]\n\n#define\tMINIMUM_POINT_NUMBER_FOR_LANEMARKER_TYPE\t10\n\n#define\tMAXIMUM_LINE_SEGMENT_FOR_LANEMARKER_TYPE\t10\n\n#define\tMAXIMUM_GAP_OF_LINE_SEGMENT_FOR_LANEMARKER_TYPE\t20\n\n\n#define\tMAXIMUMLINE_SEGMENT_OF_DASH_LANEBOUNDARY_TYPE\t2\t// 攋慄偲傒側偡偨傔偺嵟戝慄暘悢\n\n// 暋崌慄儌乕僪偺忬懺慗堏梡僇僂儞僞偺鑷抣\n#define\tFRAME_CLBT_SINGLE_TO_NONE\t10//5\t//暋崌慄儌乕僪偺扨弮慄仺枹専弌忬懺慗堏偺偨傔偺僼儗乕儉悢\n#define\tFRAME_CLBT_DOUBLE_TO_NONE\t10//5\t//暋崌慄儌乕僪偺擇廳慄仺枹専弌忬懺慗堏偺偨傔偺僼儗乕儉悢\n#define\tFRAME_CLBT_DOUBLE_TO_SINGLE\t10//5\t//暋崌慄儌乕僪偺擇廳慄仺扨弮慄忬懺慗堏偺偨傔偺僼儗乕儉悢\n#define\tFRAME_CLBT_TRIPLE_TO_NONE\t10//5\t//暋崌慄儌乕僪偺嶰廳慄仺枹専弌忬懺慗堏偺偨傔偺僼儗乕儉悢\n#define\tFRAME_CLBT_TRIPLE_TO_SINGLE\t10//5\t//暋崌慄儌乕僪偺嶰廳慄仺扨弮慄忬懺慗堏偺偨傔偺僼儗乕儉悢\n#define\tFRAME_CLBT_TRIPLE_TO_DOUBLE\t10//5\t//暋崌慄儌乕僪偺嶰廳慄仺擇廳慄忬懺慗堏偺偨傔偺僼儗乕儉悢\n\n#define\tGAP_IN_COMPLEX_LANE_BOUNDARY\t2000.\t// 枹巊梡\n\n#define\tMININUM_POINT_NUMBER_FOR_NEAR_AREA\t10\t// 
憱楬僷儔儊乕僞偵傛傞嬤朤椞堟偺幵慄嫬奅揰扵嶕帪偺嵟彮揰悢\n#define\tMAXIMUM_GAP_BETWEEN_CONNECTED_POINTS\t3\n\n#define\tBOTHSIDE_LOST_COUNT\t10\n\n#define\tFACTOR_FOR_SEARCHING_IN_DEFAULT_LANE_POSITION\t2\n\n\n#define\tMAXIMUM_YAW_FOR_MAIN_LANE\t(1.0 / 180 * M_PI)\n#define\tMINIMUM_YAW_FOR_BRANCH_LANE\t(5.0 / 180 * M_PI)\n\n#define\tCALC3DPLANE_MAXIMUM_A\t0.5\n\n#define\tMAXIMUM_AVERAGE_DISTANCE_FROM_3D_SURFACE\t100.\n#define\tMAXINUM_GAP_FOR_ISOLATED_LANE_BOUNDARY\t(15000.)\n\n#define\tMAXIMUM_ISRC_DIFF_FOR_ISOLATED_LANE_BOUNDARY_POINTS\t(5)\n#define\tMAXIMUM_JSRC_DIFF_FOR_ISOLATED_LANE_BOUNDARY_POINTS\t(5)\n\n#define\tSEARCH_MARGIN_IN_NEAR_AREA\t(500.)\n#define\tSEARCH_MARGIN_IN_FAR_AREA\t(500.)\n\n//#define\tDB_OFFSET_MIN_FACTOR\t1\n//#define\tDB_OFFSET_MIN_FACTOR\t10\t//20111122\n#define\tDB_OFFSET_MIN_FACTOR\t5\n#define\tOBSERVER_ERROR_FACTOR\t1.0\n#define\tMINIMUM_VELOCITY (30. * 1000 / 3600. * 1000)\n\n#define\tMINIMUM_Z3D\t(1.0 * 1000)\n//#define\tTH_ABOVE_GROUND\t100.0\n#define\tTH_ABOVE_GROUND\t150.0\n\n#define\tLENGTH_TO_ELIMINATE_SHORT_LANEMARKERLINES\t(2000.)\n#define\tLENGTH_TO_ELIMINATE_FEWPOINT_LANEMARKERLINES\t(20)\n\n\n#endif _CONFIG_H_\n"
},
{
"alpha_fraction": 0.4944029748439789,
"alphanum_fraction": 0.5090174078941345,
"avg_line_length": 32.48958206176758,
"blob_id": "9061711eda061cec4ea9f3b842965077370b05f4",
"content_id": "b69d8b17da2ea0d80d5465982e700755af8b9a08",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4214,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 96,
"path": "/athena/core/arm/Map/include/MapInterface.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n* @file map_interface.h\n* @brief 导航模块\n* @details 1、对构建地图中的车道进行检查;2、地图上点间的集合属性,包括角度,距离等;3、地图的空间分区域检索检索;4、查询车道、车道的左右边线、以及边线上的点; 5、查询车道上的属性;6、查询道路标志等一些规则属性;7实现道路拓扑的路由\n* @author huanhuan\n* @date 2018/7/16\n* @version v1.0\n* @par Copyright (c):\n* 武汉环宇智行科技有限公司\n* @par History:\n* version: author, date, desc\\n\n*/\n#ifndef _MAP_INTERFACE_\n#define _MAP_INTERFACE_\n\n#include \"MapData.h\"\nnamespace athena\n{\nnamespace roadmap\n{\n\nclass RoadMap\n{\npublic:\n //存储地图数据\n OSMMap *osm_map_;///< osm地图\n //二期新增动态字段,csu/rsu/obu_planning在路径规划、重规划、每秒刷新时重新设置这些信息\n std::vector<light> light_list_; ///<红绿灯列表\n std::vector<limspeed> limspeed_list_; ///<限速牌列表\n std::vector<block> block_list_; ///<施工标志列表\n std::vector<lane> lane_list_; ///<路径列表\n std::vector<cross_regulator> cross_list_; ///<路口列表\n\n RoadMap(std::string filename);\n\n\n\n /////////////////////////////////////////////////\n /// \\brief 将定位点匹配到最近的lanelet上,返回地图里的lane_id,0表示匹配不到lane\n /// \\param lat:当前定位纬度\n /// \\param lon:当前定位经度\n /// \\param yaw:当前头指向\n /// \\return 返回定位车道的id,0表示匹配不到lane\n /////////////////////////////////////////////////\n int64_t locate_point_on_lanelet(double lat, double lon, double yaw = -1);\n\n ///将定位点匹配到最近的lanelet上,返回lane_list中匹配上的lane id,0表示匹配不到lane\n /////////////////////////////////////////////////\n /// \\brief 将定位点匹配到最近的lanelet上,返回地图里的lane_id,0表示匹配不到lane\n /// \\param lat:当前定位纬度\n /// \\param lon:当前定位经度\n /// \\param yaw:当前头指向\n /// \\return 返回定位车道的id,0表示匹配不到lane\n /////////////////////////////////////////////////\n int64_t locate_point_on_lane_list(double lat, double lon, const std::vector<int64_t> &lane_list);\n\n /////////////////////////////////////////////////\n /// \\brief 从osm地图中获取lane相关信息\n /// \\param lane_id:输入lane_id信息\n /// \\param lane:输出当前的车道\n /// \\return RET_OK 0; RET_ERROR 1\n /////////////////////////////////////////////////\n int get_lane_from_map_by_id(int64_t lane_id, lane* route_lane);\n\n /////////////////////////////////////////////////\n /// 
\\brief 获取车道的左边线\n /// \\param lane_id:输入lane_id信息\n /// \\param line:输出当前车道左边线的点序列\n /// \\return RET_OK 0; RET_ERROR 1\n /////////////////////////////////////////////////\n int get_left_line_of_lane(int64_t lane_id, std::vector< point_with_id_t > line);\n\n /////////////////////////////////////////////////\n /// \\brief 获取车道的右边线\n /// \\param lane_id:输入lane_id信息\n /// \\param line:输出当前车道右边线的点序列\n /// \\return RET_OK 0; RET_ERROR 1\n /////////////////////////////////////////////////\n int get_right_line_of_lane(int64_t lane_id, std::vector< point_with_id_t > line);\n\n /////////////////////////////////////////////////\n /// \\brief 修改拓扑图中对应边的权重\n /// \\param lane_id:输入lane_id信息\n /// \\param cost: 每个车道跟下个车道的连接权重,默认为当前车道的长度,增加量可以为负值\n /// \\param int: forward == 0 表示更改当前车道与下一个车道的连接权重;forward == 1 表示更改当前车道与上一个车道的连接权重\n /// \\return RET_OK 0; RET_ERROR 1\n /////////////////////////////////////////////////\n int add_graph_edge_weight(int64_t lane_id, double cost, int forward = 0);\n\nprivate:\n void get_flow_light_and_points_from_map();\n};\n\n}\n}\n#endif // _MAP_INTERFACE_\n\n"
},
{
"alpha_fraction": 0.5678392052650452,
"alphanum_fraction": 0.5795645117759705,
"avg_line_length": 34,
"blob_id": "a60ba52f17ae206899c98d1507499dbf03d5ad16",
"content_id": "72a9879d59c6e2dead08ebcac83641c485c93adc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 609,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 17,
"path": "/athena/core/x86/Camera/lane_detect/include/utils/RefOffset.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "WINDOWS-1252",
"text": "\n// ???€?I?t?Z?b?g\nclass RefOffset {\nprivate:\n\tint _iInitCnt;\t// ?¡ë?¨²?J?E?¡°?^\n\tint _iCnt;\t\t// ?J?E?¡°?^\n\tint _aiOffset[CS4_NOISE];\t// ???n¡ª??f?[?^?o?b?t?@\npublic:\n\tinline RefOffset(void)\t:\t_iInitCnt(0), _iCnt(0)\t{\n\t\t{for(int iIdx = 0; iIdx < CS4_NOISE; iIdx++)\t{\t_aiOffset[iIdx] = 0;\t}}\n\t}\n\tinline int InitCnt(void)\t{\treturn _iInitCnt;\t}\n\tinline void InitCnt(int v)\t{\t_iInitCnt = v;\t\t}\n\tinline int Cnt(void)\t\t{\treturn _iCnt;\t\t}\n\tinline void Cnt(int v)\t\t{\t_iCnt = v;\t\t\t}\n\tinline int Offset(int iIdx)\t{\treturn _aiOffset[iIdx];\t}\n\tinline void Offset(int iIdx, int v)\t{\t_aiOffset[iIdx] = v;\t}\n};\n\n"
},
{
"alpha_fraction": 0.5902354121208191,
"alphanum_fraction": 0.6207497715950012,
"avg_line_length": 18.758621215820312,
"blob_id": "d605e16a7667663355a0db02d273cf60a159f1de",
"content_id": "5b93afc0417c6961a7832a9c460052ee14674272",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1383,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 58,
"path": "/athena/examples/LCM/Singlecar/control/common/control_cmd.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n/**\n * @file control_logic.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef COMMON_CONTROL_CMD_H_\n#define COMMON_CONTROL_CMD_H_\n\n#include \"chassis.h\"\n#include \"trajectory.h\"\n#include \"localization.h\"\n#include \"controller_output.h\"\n#include \"controller.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n/**\n * @class ControlCmd\n * @brief 控制命令.\n */\nclass ControlCmd\n{\npublic:\n ControlCmd() = default;\n ~ControlCmd() =default;\n ///方向盘转角,单位:度\n \tdouble steering_angle_;\n \t///方向盘角速度,单位:度/s\n \tdouble steering_angle_speed_;\n \t///驾驶模式:人工驾驶 1、自动驾驶 3 和辅助驾驶 2\n \tint32_t steering_driving_mode_;\n \t///油门控制输出\n \tdouble acc_value_;\n \t///驾驶模式:人工驾驶 1、自动驾驶 3 和辅助驾驶 2\n \tint32_t accelerate_driving_mode_;\n \t///刹车值\n \tdouble brake_value_;\n \t///驾驶模式:人工驾驶 1、自动驾驶 3 和辅助驾驶 2\n \tint32_t brake_driving_mode_;\n \t///EPB 状态控制\n \tint32_t epb_enable_;\n \t///驾驶模式:人工驾驶 1、自动驾驶 3 和辅助驾驶 2\n \tint32_t epb_driving_mode_;\n \t///档杆位置\n \tint32_t gear_lever_;\n};\n}//namespace control\n}//namespace athena\n\n#endif\n"
},
{
"alpha_fraction": 0.5932716727256775,
"alphanum_fraction": 0.6004972457885742,
"avg_line_length": 28.31662940979004,
"blob_id": "33062dad9a58c1528964418b8192e5fc2e5fff99",
"content_id": "766e286d1c7bd85df10397e2c6ff38047bd3f8d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 18315,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 439,
"path": "/athena/core/arm/Planning/include/planning/planning.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 局部规划的输入和输出接口信息。\n */\n\n#ifndef _ATHENA_PLANNING_H\n#define _ATHENA_PLANNING_H\n\n#pragma once\n\n#include <math.h>\n#include <iostream>\n#include <vector>\n\n//#include <pthread.h>\n//#include <sys/sem.h>\n\n#include <fstream>\n#include <iostream>\n\n#include \"common/navi_point.h\"\n#include \"common/path.h\"\n#include \"common/car_state.h\"\n#include \"collision_check/collision_check.h\"\n#include \"park/park.h\"\n#include \"route_data.h\"\n#include \"planning_output.h\"\n//#include \"planning_param.h\"\n#include \"config.h\"\n\n\n/**\n * @namespace athena::planning\n * @brief athena::planning\n */\nnamespace athena{\nnamespace planning{\n\n/**\n * @class Planning\n * @brief 局部路径规划的输入接口类。\n */\nclass Planning\n{\npublic:\n /**\n * @brief 构造函数\n */\n Planning();\n /**\n * @brief 析构函数\n */\n virtual ~Planning();\n\n /**\n * @brief 初始化函数。\n * @return 1表示初始化成功,0表示失败\n */\n int init();\n\n /**\n * @brief 重置函数函数。\n * @return 1表示重置成功,0表示失败\n */\n int reset_value();\n\n /**\n * @brief 输入轨迹,给virtual_path_赋值。\n * @param src_path 输入量:输入的轨迹。\n * @return 1表示成功,0表示失败\n */\n int put_virtual_path( const path& src_path );\n\n /**\n * @brief 接收地图切片信息,赋值给成员变量car_state_。\n * @param car_state 输入量:地图切片信息。\n * @return 1表示接收车辆状态信息成功,0表示失败\n */\n int put_car_state( const CarState& car_state );\n\n /**\n * @brief 接收地图切片信息,赋值给成员变量route_\n * @param src_route 输入量:地图切片信息。\n * @return 1表示接收地图切片信息成功,0表示失败\n */\n int put_map_info( const route_planning_m& src_route );\n\n /**\n * @brief 从切片信息中提取出各车道的中心线,赋值给center_lines_。\n * @return 1表示获得中心线信息成功,0表示失败\n */\n int compute_center_insert_from_route();\n\n /**\n * @brief 从切片信息中提取出各车道的中心线。\n * @param route 输入量:切片信息。\n * @param center_lines 输出量:各车道的中心线。\n * @return 1表示获得中心线信息成功,0表示失败\n */\n int compute_center_lines_from_route( route_planning_m route, vector<path>& center_lines );\n\n /**\n * @brief 对各车道的中心线进行三次样条平滑并内插,赋值给smooth_paths_。\n * @return 1表示平滑成功,0表示失败\n */\n int smooth_center_lines();\n\n /**\n * @brief 
对各车道的中心线进行三次样条平滑并内插。\n * @param center_lines 输入量:各车道的中心线。\n * @param smooth_paths 输出量:平滑内插后的中心线。\n * @return 1表示平滑成功,0表示失败\n */\n int smooth_center_lines( vector<path> center_lines, vector<path>& smooth_paths );\n\n /**\n * @brief 接收路口红绿灯信息。\n * @param src_traffic_lights 输入量:路口红绿灯信息。\n * @return 1表示接收路口红绿灯信息成功,0表示失败\n */\n int put_traffic_lights( const TrafficLights& src_traffic_lights );\n\n /**\n * @brief 接收停车点/泊车点信息。\n * @param src_stop_park 输入量:停车点/泊车点信息。\n * @param stop_flag 输入量:10终点停车,否则泊车。\n * @return 1表示接收停车点/泊车点信息成功,0表示失败\n */\n int put_stop_park( const StopPark& src_stop_park, const int& stop_flag );\n\n /**\n * @brief 接收障碍物信息。\n * @param src_obstacle 输入量:障碍物信息。\n * @return 1表示接收障碍物信息成功,0表示失败\n */\n int put_obstacles_info( const SensorObstacles& src_obstacle );\n\n /**\n * @brief 过滤障碍物信息,通过地图边界信息过滤掉道路外的障碍物。赋值给road_obstacle_。\n * @param filter_obstacle_flag 输入量:障碍物过滤方式。\n * @return 1表示接收障碍物信息成功,0表示失败\n */\n int filter_obstacles_info( int filter_obstacle_flag );\n\n /**\n * @brief 根据路边界和障碍物中心点,过滤掉不在道路范围内的障碍物_。\n * @param sensor_obstacle 输入量:传感器输出的障碍物信息。\n * @param out_obstacle 输出量:过滤后的障碍物信息。\n * @return 1表示成功,0表示失败\n */\n bool filter_obstacle_by_nothing( const ObstacleInfo sensor_obstacle, ObstacleRect& out_obstacle );\n\n /**\n * @brief 根据路边界和障碍物中心点,过滤掉不在道路范围内的障碍物_。\n * @param sensor_obstacle 输入量:传感器输出的障碍物信息。\n * @param _begin 输入量:障碍物过滤的范围(起始位置)。\n * @param _end 输入量:障碍物过滤的范围(终止位置)。\n * @param out_obstacle 输出量:过滤后的障碍物信息。\n * @return 1表示障碍物在道路范围内,0表示不在\n */\n bool filter_obstacle_by_point( const ObstacleInfo sensor_obstacle, int _begin, int _end, ObstacleRect& out_obstacle );\n\n /**\n * @brief 根据路边界和障碍物矩形框,过滤掉不在道路范围内的障碍物_。\n * @param sensor_obstacle 输入量:传感器输出的障碍物信息。\n * @param _begin 输入量:障碍物过滤的范围(起始位置)。\n * @param _end 输入量:障碍物过滤的范围(终止位置)。\n * @param out_obstacle 输出量:过滤后的障碍物信息。\n * @return 1表示障碍物在道路范围内,0表示不在\n */\n bool filter_obstacle_by_rect( const ObstacleInfo sensor_obstacle, int _begin, int _end, ObstacleRect& out_obstacle );\n\n\n// /**\n// * @brief 
向controll下发速度信息。\n// * @param flag 输入量:0表示直接下发地图速度信息,1表示下发规划的速度信息。\n// */\n// void output_motion_planning_speed( int flag );\n\n// /**\n// * @brief 计算规划轨迹的纵向信息(速度,加速度,档位)。\n// * @param virtual_lane 输入量:规划的虚拟轨迹。\n// * @param cur_pos 输入量:车辆在所在车道中心线上的位置。\n// * @param end_speed 输入量:规划的虚拟轨迹最后一个点在地图上的地图推荐速度。\n// * @param mstop 输入量:是否有碰撞需要停车。\n// * @param free_num 输入量:有碰撞需要停车时距离碰撞点的距离,如果没有碰撞停车就填极大值99999。\n// */\n// int cal_longitudinal_info( path& virtual_lane, int cur_pos, double end_speed, int mstop, int free_num );\n\n\n /**\n * @brief 判断是否需要进行泊车,\n * @return 1表示需要泊车,0表示不需要\n */\n int park_decision();\n\n /**\n * @brief 生成泊车轨迹,\n * @return 1表示成功,0表示失败\n */\n int generate_park_trajectory();\n\n /**\n * @brief 地图匹配函数,\n * 实时把车辆状态匹配到地图上,返回车辆在哪个车道的第几个点上。\n * @return 1表示地图匹配成功,0表示失败\n */\n int map_matching_thread();\n\n /**\n * @brief 行为决策函数,\n * 实时检测车辆与周围障碍物的碰撞关系\n * 判断规划的虚拟轨迹是否有碰撞,及最近碰撞距离;\n * 判断各车道是否有碰撞,及最近碰撞距离;\n * 最终决策出车辆的行为:是否需要换道或减速停车,如果需要换道,向哪个车道换道。\n * @return 1表示碰撞检测成功,0表示失败\n */\n int behaviour_decision_thread();\n\n /**\n * @brief 轨迹生成函数,\n * 生成轨迹簇,并计算各轨迹的代价函数,选择代价值最小的轨迹为最优轨迹。\n * @return 1表示轨迹生成成功,0表示失败\n */\n int motion_plan_virtual_lane_thread();\n\n /**\n * @brief 计算纵向速度/加速度,转弯时有减速过程\n * @param virtual_lane 输入量:传感器输出的障碍物信息。\n * @param target_speed 输入量:目标速度。\n * @param free_num 输入量:如果需要停车,停车长度\n * @return 1表示成功,0表示失败\n */\n int compute_longitudinal_info( path& virtual_lane, double target_speed, int free_num );\n\n /**\n * @brief 设置要输出的轨迹,\n * 给virtual_path_和collision_check_path_赋值\n * @param output_virtual_path 输入量:输入轨迹\n * @param tail_lat_offset 输入量:横向偏移量\n */\n int set_out_trajectory( const path& output_virtual_path, const double& tail_lat_offset );\n\n /**\n * @brief 输出接口,\n * 给out_trajectory_赋值\n */\n int get_out_trajectory();\n\n /**\n * @brief 记录out_trajectory_,\n */\n int log_out_trajectory();\n\n /**\n * @brief 计算纵向速度/加速度,转弯时有减速过程\n * @param start_num 输入量:开始位置\n * @param set_speed 输入量:设置的速度。\n * @param set_gears 输入量:设置的档位\n * @return 1表示成功,0表示失败\n 
*/\n int set_trajectory_longitudinal( const int& start_num, double set_speed, int8_t set_gears );\n\n\n\n\n /**\n * @brief 设置地图匹配结果,\n * @param cur_pos 输入量:地图匹配得到的车辆在所在车道中心线上的位置。\n * @param en_pos 输入量:地图匹配得到的规划目标点位置(中心线上的点)。\n * @param car_in_which_line 输入量:地图匹配得到的车辆所在车道。\n * @param cur_pos_virtual_lane 输入量:地图匹配得到的车辆在虚拟车道上的位置。\n * @param st_pos_virtual_lane 输入量:地图匹配得到的虚拟车道上规划的起始点。\n * @param map_matching_error 输入量:地图匹配误差。\n * @return 1表示设置地图匹配结果成功,0表示失败\n */\n int set_map_matching_result( int cur_pos, int en_pos, int car_in_which_line, int cur_pos_virtual_lane,\n int st_pos_virtual_lane, double map_matching_error );\n\n /**\n * @brief 获得地图匹配结果,\n * @param cur_pos 输出量:地图匹配得到的车辆在所在车道中心线上的位置。\n * @param en_pos 输出量:地图匹配得到的规划目标点位置(中心线上的点)。\n * @param car_in_which_line 输出量:地图匹配得到的车辆所在车道。\n * @param cur_pos_virtual_lane 输出量:地图匹配得到的车辆在虚拟车道上的位置。\n * @param st_pos_virtual_lane 输出量:地图匹配得到的虚拟车道上规划的起始点。\n * @param map_matching_error 输出量:地图匹配误差。\n * @return 1表示获得地图匹配结果成功,0表示失败\n */\n int get_map_matching_result( int& cur_pos, int& en_pos, int& car_in_which_line, int& cur_pos_virtual_lane,\n int& st_pos_virtual_lane, double& map_matching_error );\n\n\n /**\n * @brief 设置碰撞检测结果,\n * @param free_length 输入量:如果虚拟车道有碰撞,车辆到碰撞点之前的距离。\n * @param collision_virtual_lane 输入量:规划产生的轨迹是否有碰撞。\n * @return 1表示设置碰撞检测结果成功,0表示失败\n */\n int set_collision_check_result( double free_length, bool collision_virtual_lane );\n\n /**\n * @brief 获得碰撞检测结果,\n * @param free_length 输出量:如果虚拟车道有碰撞,车辆到碰撞点之前的距离。\n * @param collision_virtual_lane 输出量:规划产生的轨迹是否有碰撞。\n * @return 1表示获得碰撞检测结果成功,0表示失败\n */\n int get_collision_check_result( double& free_length, bool& collision_virtual_lane );\n\n /**\n * @brief 获得规划生成的碰撞检测轨迹(可用于界面显示),\n * @param collision_check_path 输出量:储存规划生成的碰撞检测轨迹。\n * @return 1表示获得轨迹成功,0表示失败\n */\n int get_collision_check_path( path& collision_check_path );\n\n /**\n * @brief 泊车轨迹地图匹配\n * @return 地图匹配结果(车当前状态匹配到泊车轨迹上的点号),返回-1表示匹配失败\n */\n int park_trajectory_map_matching();\n\n /**\n * @brief 泊车轨迹碰撞检测\n * 
@param num_start 输入量:碰撞检测的起始位置,有地图匹配获得。\n * @return 1表示有碰撞,0表示无\n */\n int park_trajectory_collision_check( int num_start );\n\n /**\n * @brief 手动拨杆换道\n * @param direction 输入量:换道方向,-1左,1右,0不换道。\n * @return 1表示成功,0表示失败\n */\n int decide_hand_expected_lane( int direction );\n\n /**\n * @brief 平移path\n * @param src_path 输入量:平移前path。\n * @param translate_length 输入量:平移长度。\n * @return 平移后path。\n */\n path translate_path( const path& src_path, const double& translate_length );\n\n /**\n * @brief 起点或退出自动驾驶\n * @param start 输入量:0起点1退出。\n * @return 1成功0失败。\n */\n int set_start_auto( bool start );\n\npublic:\n route_planning_m route_; ///<全局规划下发的地图切片信息\n\n CarState car_state_; ///<车辆状态信息\n TrafficLights traffic_lights_; ///<路口红绿灯信息\n StopPark terminal_stop_; ///<停车点信息\n// StopPark park_stop_; ///<泊车点信息\n SensorObstacles sensor_obstacles_; ///<来自传感器的障碍物信息\n\n// bool is_car_state_put_; ///<true:已输入车辆状态信息,false:没有\n// bool is_map_info_put_; ///<true:已输入地图信息,false:没有\n// bool is_stop_park_put_; ///<true:已输入停车点/泊车点信息,false:没有\n// bool is_obstacle_info_put_; ///<true:已输入障碍物信息,false:没有\n bool is_map_updated_; ///<动态下发的地图是否更新。true:有更新\n bool is_park_; ///<是否需要泊车。true:需要泊车\n bool is_park_d_published_; ///<泊车轨迹D档部分是否发送。true:已发送\n bool is_park_r_published_; ///<泊车轨迹R档部分是否发送。true:已发送\n\n OutTrajectory out_trajectory_; ///<motion下发给controller的轨迹信息。\n double out_longitudinal_speed_; ///<motion下发给controller的纵向速度\n double out_longitudinal_length_; ///<多少米后达到速度out_longitudinal_speed_\n// LongitudinalControlInfo longitudinal_control_info_; ///<motion下发给controller的纵向信息。\n IntelligentParking intelligentparking_; ///<泊车\n path park_trajectory_d_; ///<泊车轨迹D档部分\n path park_trajectory_r_; ///<泊车轨迹R档部分\n\n\nprivate:\n std::vector<path> center_lines_; ///<所有车道的道路中心线\n std::vector<path> smooth_paths_; ///<参考中心线:平滑插值后的道路中心线\n\n double map_limit_speed_; ///<地图限速\n RoadSurface road_obstacle_; ///<路面障碍物信息\n path collision_check_path_; ///<用于做碰撞检测的轨迹\n path virtual_path_; ///<局部规划产生的虚拟轨迹\n bool 
is_have_virtual_path_; ///<表示是否已经有虚拟轨迹了,1是0否\n\n int cross_road_status_; ///<路口信息,0为非路口 1为路口绿灯 2为在路口且为红灯\n\n bool is_virtual_lane_collide_; ///<规划产生的轨迹是否有碰撞,1是0否\n double virtual_lane_free_length_; ///<如果虚拟车道有碰撞,车辆到碰撞点之前的距离\n\n bool is_stop_; ///<需要停车\n bool is_slow_down_; ///<需要减速\n\n double collision_obj_speed_; ///<碰撞障碍物的运行速度\n double collision_obj_heading_; ///<碰撞障碍物的运行方向\n\n int car_in_lane_; ///<本车在哪个车道,从左到右依次为0,1,2,...\n int current_position_; ///<车辆在所在车道中心线上的位置\n int end_position_; ///<规划目标点位置(中心线上的点)\n double map_matching_error_; ///<车辆匹配到地图中心线上的距离\n int current_position_virtual_lane_; ///<车辆在虚拟车道上的位置\n int start_position_virtual_lane_; ///<虚拟车道上规划的起始点\n\n// int last_expected_lane_; ///<上一次期望车道\n int expected_lane_; ///<期望车道\n// int last_hand_expected_lane_; ///<上一次手动期望车道\n int hand_expected_lane_; ///<手动期望车道\n\n// int change_lane_direction_; ///<换道方向\n// int change_lane_reason_; ///<换道原因\n// int last_change_lane_reason_; ///<上一次换道的原因\n// int change_lane_starting_lane_; ///<换道开始的车道\n int change_lane_ending_lane_; ///<换道结束的车道\n\n int drive_mode_; ///<驾驶模式,1:超车模式,2:跟车模式\n int car_action_; ///<车辆状态,1:单车,2:车队头车,3:车队其他车辆\n bool can_start_auto_; ///<启动自动驾驶\n// int can_stop_auto_; ///<退出自动驾驶\n\n double keep_last_length_; ///<保留上一次规划的长度,单位:米\n double planning_length_; ///<本次规划的长度,单位:米\n int tail_length_number_; ///<尾部拼接一段地图中心线点的个数,个\n double collision_check_length_; ///<碰撞检测长度,单位:米\n\n double next_planning_mileage_; ///<每走过多少里程更新一次轨迹,单位:米\n\n\n};\n\n\n\n} //namespace planning\n} //namespace athena\n\n#endif // _ATHENA_PLANNING_H\n\n"
},
{
"alpha_fraction": 0.6022364497184753,
"alphanum_fraction": 0.7220447063446045,
"avg_line_length": 11.27450942993164,
"blob_id": "4c5347f756e4111dd6c44837e5562bff98b8b14c",
"content_id": "b550373d845d4e43efd464fe236c55e3502f4e22",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 626,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 51,
"path": "/athena/examples/ROS/src/Perception/lane_detect/config/camera_720P_sliver_pointgrey.ini",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "[CAMERA1]\nNUM_OF_CAM=1\n\n;lateral position[mm]\nCAM_POS_X=0.0\n\n;longituidal position[mm]\nCAM_POS_Z=0.0\n\n;vertical position[mm]\nCAM_POS_Y=-1450.0\n\n;roll angle[deg]\nCAM_ROLL=0.0\n\n;0.5\n;pitch angle[deg]\nCAM_PITCH=2.0\n\n;2.0\n;yaw angle[deg]\nCAM_YAW=-0.8\n\nIMAGEWIDTH=1280\nIMAGEHEIGHT=720\nISRCMIN=0\nISRCMAX=719\nJSRCMIN=0\nJSRCMAX=1279\n\nCAM_OPT_IMG_CX=640\n\nCAM_OPT_IMG_CY=131\n\n;focal length(X) [pixel]\nCAM_PIXELF_X=2137\n\n;focal length(Y) [pixel]\nCAM_PIXELF_Y=2145\n\n;ROI red line [pixel]\nRECT_X_LEFT=100\nRECT_X_RIGHT=300\nRECT_Y=150\n\n;trapezoid [pixel]\nTRA_UP_LEFT=400\nTRA_UP_RIGHT=500\nTRA_DOWN_LEFT=200\nTRA_DOWN_RIGHT=300\nTRA_HEIGHT=150\n"
},
{
"alpha_fraction": 0.6640866994857788,
"alphanum_fraction": 0.6671826839447021,
"avg_line_length": 28.363636016845703,
"blob_id": "fea4cf2aa70a06e08e158cea7be26375abaffbcc",
"content_id": "951369ef3e26c29d1c33925d728b47ad5637d5c1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1292,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 44,
"path": "/athena/examples/LCM/Singlecar/obu/src/obu/obu_planning/draw_obu_planning.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#define KEY_ESC 27\n\nextern int x_lbefore,y_lbefore;\nextern int x_rbefore,y_rbefore;\nextern int z_before1,z_before2;\n\nextern bool buttonSaveLeft, buttonSaveMiddle, buttonSaveRight;\nextern float x_move,y_move,z_move;\nextern float x_move_save,y_move_save, z_move_save;\nextern float x_rotate,y_rotate,z_rotate;\nextern float x_rotate_save,y_rotate_save,z_rotate_save;\nextern float m_zoom;\n\nextern float m_aspect;\n\nextern float m_eyex, m_eyey, m_eyez;\nextern float m_centerx, m_centery, m_centerz;\nextern float m_upx, m_upy, m_upz;\n\n///////////////////OPEN GL control ///////////////////////////////////////\nextern int g_frame;\nextern bool g_pause;\n\nvoid DrawPath();\nvoid draw_trajectorys_motion_plan();\n\nvoid DrawText(float x, float y, float z, char * outputstring);\nvoid DrawTextRGB(float x, float y, float z, float r, float g, float b, char * outputstring);\n\n///////////////////////////////////////////////////////////////\nvoid OpenGL_Draw();\nvoid DrawCar(double x, double y, double yaw, double steer_angle);\n\nvoid MyGLDispIni();\nvoid myDisplay();\n\nvoid SpecialKey(int key, int x, int y);\nvoid MouseKey(int button, int state, int x, int y);\nvoid MouseRotate(int x, int y, int z);\nvoid PassiveMouseMove(int x, int y);\nvoid MouseMove(int x, int y);\nvoid Reshape(int w, int h);\n"
},
{
"alpha_fraction": 0.5952924489974976,
"alphanum_fraction": 0.6002107858657837,
"avg_line_length": 49.98208999633789,
"blob_id": "406203c1958c02c356c0181f1bc8d732098600c8",
"content_id": "2ce26c21591f7dec65642150165db9ef8f9e982c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 17657,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 335,
"path": "/athena/examples/LCM/Singlecar/control/apps/message_manger/lcm/lcm_message_manger.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"lcm_message_manger.h\"\n\nnamespace athena{\nnamespace control{\n void LcmMessageManger::Init(string obu_url,ControlLogic *control_logic)\n {\n control_logic_ = control_logic;\n lcm_ = new lcm::LCM(obu_url);\n\n std::cout<<\"obu_url:\"<<obu_url<<endl;\n\n if(!lcm_ -> good())\n {\n Logging::LogInfo(Logging::ERROR,\"lcm init error!\");\n return;\n }\n\n // 导航数据\n lcm_ -> subscribe(\"ins_info\", &LcmMessageManger::HandleLocalizationMessage, this);\n // 底盘反馈:控制信息\n lcm_ -> subscribe(\"vehicle_info\", &LcmMessageManger::HandleChassisDetailMessage, this);\n // 规划下发路径\n lcm_ -> subscribe(\"mt_info_report\", &LcmMessageManger::HandleTrajectoryMessage,this);\n //规划下发BCM控制\n lcm_ -> subscribe(\"mt_bcm_control_cmd\", &LcmMessageManger::HandleMtBcmControlMessage,this);\n\n //线程执行开始\n start();\n }\n\n\nvoid LcmMessageManger::PublishControlCmd(ControlCmd control_cmd)\n{\n obu_lcm::control_cmd control_cmd_msg;\n\n control_cmd_msg.steer_angle = control_cmd.steering_angle_;\n control_cmd_msg.steer_speed = control_cmd.steering_angle_speed_;\n control_cmd_msg.steer_driving_mode = control_cmd.steering_driving_mode_;\n control_cmd_msg.accel_value = control_cmd.acc_value_;\n control_cmd_msg.accel_driving_mode = control_cmd.accelerate_driving_mode_;\n control_cmd_msg.brake_value = control_cmd.brake_value_;\n //std::cout<<\"steering_angle:\"<<control_cmd_msg.steering_angle <<endl;\n //std::cout<<\"steering_driving_mode:\"<<control_cmd_msg.steering_driving_mode <<endl;\n control_cmd_msg.brake_driving_mode = control_cmd.brake_driving_mode_;\n control_cmd_msg.epb_enable = control_cmd.epb_enable_;\n control_cmd_msg.epb_driving_mode = control_cmd.epb_driving_mode_;\n control_cmd_msg.gear_level = control_cmd.gear_lever_;\n\n lcm_ -> publish(\"control_cmd\", &control_cmd_msg);\n}\n\nvoid LcmMessageManger::PublishBcmControlCmd(BcmControlCmd bcm_control_cmd)\n{\n obu_lcm::bcm_control_cmd bcm_control_cmd_msg;\n\n bcm_control_cmd_msg.speaker_control = 
bcm_control_cmd.speaker_control_;\n bcm_control_cmd_msg.high_beam_ctrl = bcm_control_cmd.high_beam_ctrl_;\n bcm_control_cmd_msg.low_beam_ctrl = bcm_control_cmd.low_beam_ctrl_;\n bcm_control_cmd_msg.left_turn_ctrl = bcm_control_cmd.left_turn_ctrl_;\n bcm_control_cmd_msg.right_turn_ctrl = bcm_control_cmd.right_turn_ctrl_;\n bcm_control_cmd_msg.front_wiper_ctrl = bcm_control_cmd.front_wiper_ctrl_;\n bcm_control_cmd_msg.rear_wiper_ctrl = bcm_control_cmd.rear_wiper_ctrl_;\n bcm_control_cmd_msg.position_lamp_ctrl = bcm_control_cmd.position_lamp_ctrl_;\n bcm_control_cmd_msg.front_fog_lamp_ctrl = bcm_control_cmd.front_fog_lamp_ctrl_;\n bcm_control_cmd_msg.rear_fog_lamp_ctrl = bcm_control_cmd.rear_fog_lamp_ctrl_;\n bcm_control_cmd_msg.brake_lamp_ctrl = bcm_control_cmd.brake_lamp_ctrl_;\n bcm_control_cmd_msg.alarm_lamp_ctrl = bcm_control_cmd.alarm_lamp_ctrl_;\n bcm_control_cmd_msg.lf_door_ctrl = bcm_control_cmd.lf_door_ctrl_;\n bcm_control_cmd_msg.rf_door_ctrl = bcm_control_cmd.rf_door_ctrl_;\n bcm_control_cmd_msg.lr_door_ctrl = bcm_control_cmd.lr_door_ctrl_;\n bcm_control_cmd_msg.rr_door_ctrl = bcm_control_cmd.rr_door_ctrl_;\n\n lcm_ -> publish(\"bcm_control_cmd\", &bcm_control_cmd_msg);\n}\n\nvoid LcmMessageManger::PublishControlInfoReport(ControlInfoReport control_info_report)\n{\n obu_lcm::control_info_report control_info_report_msg;\n memset(&control_info_report_msg,0,sizeof(obu_lcm::control_info_report));\n control_info_report_msg.lon = control_info_report.cur_lon_;\n control_info_report_msg.lat = control_info_report.cur_lat_;\n control_info_report_msg.yaw = control_info_report.cur_yaw_;\n// control_info_report_msg.cur_brake = control_info_report.cur_brake_;//该字段改为当前期望的刹车值 20180604\n// // control_info_report_msg.cur_speed = control_info_report.Current_Speed;\n// control_info_report_msg.cur_speed = control_info_report.cur_speed_;//modfly by alex send CAN car speed to netwok,instead ins speed\n// control_info_report_msg.cur_speed_lateral = 
control_info_report.cur_speed_lateral_;\n// control_info_report_msg.cur_speed_longitudinal = control_info_report.cur_speed_longitudinal_;\n// control_info_report_msg.cur_acceleration_pattern = control_info_report.cur_acceleration_pattern_;\n// control_info_report_msg.cur_acceleration = control_info_report.cur_acceleration_;\n// control_info_report_msg.cur_acceleration_lateral = control_info_report.cur_acceleration_lateral_;\n// control_info_report_msg.cur_acceleration_longitudinal = control_info_report.cur_acceleration_longitudinal_;//modefy by alex20160927\n// control_info_report_msg.steering_angle = control_info_report.steering_angle_;\n// control_info_report_msg.flashing_status = control_info_report.flashing_status_ + control_info_report.flashing_status_;\n// //control_info_report_msg.cur_gears = control_info_report.Current_gears;\n// control_info_report_msg.cur_gears = control_info_report.cur_gears_;//将档杆位置通过这个接口发送出去add by alex20160927\n// control_info_report_msg.gps_time = control_info_report.gps_time_;\n// control_info_report_msg.mileage = control_info_report.mileage_;//add by alex20160927 增加里程数发送\n// //control_info_report_msg.num_of_camera_stat = control_info_report.num_of_camera_stat_;\n//// control_info_report_msg.camera_stat.assign(control_info_report.camera_stat_.begin(),control_info_report.camera_stat_.end());\n//// control_info_report_msg.num_of_radar_stat = control_info_report.num_of_radar_stat_;\n//// control_info_report_msg.radar_stat.assign(control_info_report.radar_stat_.begin(),control_info_report.radar_stat_.end());\n//// control_info_report_msg.num_of_lidar_stat = control_info_report.num_of_lidar_stat_;\n//// control_info_report_msg.lidar_stat.assign(control_info_report.lidar_stat_.begin(),control_info_report.lidar_stat_.end());\n//// control_info_report_msg.gps_stat.assign(control_info_report.gps_stat_.begin(),control_info_report.gps_stat_.end());\n//// control_info_report_msg.num_of_gps_stat = control_info_report_msg.num_of_gps_stat;\n// 
control_info_report_msg.eps_stat = control_info_report.eps_stat_;\n// control_info_report_msg.epb_stat = control_info_report.epb_stat_;\n// control_info_report_msg.brake_stat = control_info_report.brake_stat_;\n lcm_ -> publish(\"control_info_report\", &control_info_report_msg);\n}\n\nvoid LcmMessageManger::PublishEmergencyCmd(Emergency emergency)\n{\n obu_lcm::emergency emergency_msg;\n\n emergency_msg.emergency_mode = emergency.emergency_mode_;\n emergency_msg.emergency_level = emergency.emergency_level_;\n emergency_msg.emergency_value = emergency.emergency_value_;\n\n lcm_ -> publish(\"emergency\", &emergency_msg);\n}\n\n\nvoid LcmMessageManger::HandleLocalizationMessage(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::ins_info * msg)\n{\n Localization localization;\n\n ///GPS时间\n localization.gps_time_ = msg -> gps_time;\n ///周\n localization.week_ = msg -> week;\n ///经度\n localization.lat_ = msg -> lat;\n ///纬度\n localization.lon_ = msg -> lon;\n ///海拔\n localization.height_ = msg -> height;\n ///横向速度\n localization.lateral_speed_ = msg -> lateral_speed;\n ///纵向速度\n localization.longitudinal_speed_ = msg -> longitudinal_speed;\n ///地向速度\n localization.down_speed_ = msg -> down_speed;\n ///横滚角度\n localization.roll_ = msg -> roll;\n ///俯仰角度\n localization.pitch_ = msg -> pitch;\n ///航向角度\n localization.heading_ = msg -> heading;\n ///横向加速度\n localization.lateral_accelerate_ = msg -> lateral_accelerate;\n ///纵向加速度\n localization.longitudinal_accelerate_ = msg -> longitudinal_accelerate;\n ///地向加速度\n localization.down_accelerate_ = msg -> down_accelerate;\n ///横滚角速度\n localization.roll_speed_ = msg -> roll_speed;\n ///俯仰角速度\n localization.pitch_speed_ = msg -> pitch_speed;\n ///航向角速度\n localization.heading_speed_ = msg -> heading_speed;\n\n control_logic_ -> SubscribeLocalization(localization);\n usleep(10);\n}\n\nvoid LcmMessageManger::HandleChassisDetailMessage(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const 
obu_lcm::vehicle_info* msg)\n{\n ChassisDetail chassis_detail;\n chassis_detail.chassis_error_ = msg -> chassis_error;\n// chassis_detail.wheel_speed_fl_ = msg -> wheel_speed_fl;\n// chassis_detail.wheel_speed_fr_ = msg -> wheel_speed_fr;\n// chassis_detail.wheel_speed_bl_ = msg -> wheel_speed_bl;\n// chassis_detail.wheel_speed_br_ = msg -> wheel_speed_br;\n chassis_detail.car_speed_ = msg -> vehicle_speed;\n chassis_detail.eng_rpm_ = msg -> eng_rpm;\n// chassis_detail.acc_pedal_pos_ = msg -> acc_pedal_pos;\n// chassis_detail.throttle_pos_feedback_ = msg -> throttle_pos_feedback;\n// chassis_detail.eng_torq_without_tcured_ = msg -> eng_torq_without_tcured;\n// chassis_detail.batt_volt_v_ = msg -> batt_volt_v;\n// chassis_detail.driver_torque_ = msg -> driver_torque;\n// chassis_detail.eng_actual_ind_torque_ = msg -> eng_actual_ind_torque;\n// chassis_detail.friction_torque_ = msg -> friction_torque;\n// chassis_detail.torque_limitation_ = msg -> torque_limitation;\n// chassis_detail.torque_reduction_ = msg -> torque_reduction;\n// chassis_detail.turbine_rpm_ = msg -> turbine_rpm;\n// chassis_detail.brake_pressure_ = msg -> brake_pressure;\n// chassis_detail.engine_running_status_ = msg -> engine_running_status;\n// chassis_detail.gear_level_ = msg -> gear_level;\n chassis_detail.at_gear_ = msg -> at_status;\n chassis_detail.brake_status_ = msg -> brake_status;\n// chassis_detail.epb_brake_status_ = msg -> epb_brake_status;\n// chassis_detail.dfco_ = msg -> dfco;\n// chassis_detail.idle_status_ = msg -> idle_status;\n// chassis_detail.tcu_torque_red_availability_ = msg -> tcu_torque_red_availability;\n// chassis_detail.eng_torque_failure_ = msg -> eng_torque_failure;\n// chassis_detail.ems_released_ = msg -> ems_released;\n// chassis_detail.eng_started_ = msg -> eng_started;\n// chassis_detail.torque_reduction_flag_ = msg -> torque_reduction_flag;\n// chassis_detail.torque_limitation_flag_ = msg -> torque_limitation_flag;\n// chassis_detail.gear_eng_agement_ = msg 
-> gear_eng_agement;\n// chassis_detail.tcu_state_ = msg -> tcu_state;\n// chassis_detail.ebd_work_ = msg -> ebd_work;\n// chassis_detail.abs_ebdlable_ = msg -> abs_ebdlable;\n chassis_detail.steering_angle_feedback_ = msg -> steer_angle;\n// chassis_detail.steering_angle_speed_feedback_ = msg -> steering_angle_speed_feedback;\n// chassis_detail.steering_driving_mode_feedback_ = msg -> steering_driving_mode_feedback;\n// chassis_detail.brake_value_feedback_ = msg -> brake_value_feedback;\n// chassis_detail.brake_run_time_feedback_ = msg -> brake_run_time_feedback;\n// chassis_detail.acc_driving_mode_feedback_ = msg -> acc_driving_mode_feedback;\n// chassis_detail.epb_driving_mode_feedback_ = msg -> epb_driving_mode_feedback;\n// chassis_detail.speaker_status_ = msg -> speaker_status;\n// chassis_detail.high_beam_status_ = msg -> high_beam_status;\n// chassis_detail.low_beam_status_ = msg -> low_beam_status;\n chassis_detail.left_turn_status_ = msg -> left_turn_status;\n chassis_detail.right_turn_status_ = msg -> right_turn_status;\n// chassis_detail.front_wiper_status_ = msg -> front_wiper_status;\n// chassis_detail.rear_wiper_status_ = msg -> rear_wiper_status;\n// chassis_detail.position_lamp_status_ = msg -> position_lamp_status;\n// chassis_detail.front_fog_lamp_status_ = msg -> front_fog_lamp_status;\n// chassis_detail.rear_fog_lamp_status_ = msg -> rear_fog_lamp_status;\n// chassis_detail.brake_lamp_status_ = msg -> brake_lamp_status;\n// chassis_detail.alarm_lamp_status_ = msg -> alarm_lamp_status;\n// chassis_detail.lf_door_status_ = msg -> lf_door_status;\n// chassis_detail.rf_door_status_ = msg -> rf_door_status;\n// chassis_detail.lr_door_status_ = msg -> lr_door_status;\n// chassis_detail.rr_door_status_ = msg -> rr_door_status;\n\n\n control_logic_ -> SubscribeChassis(chassis_detail);\n usleep(10);\n}\n\nvoid LcmMessageManger::HandleTrajectoryMessage(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::mt_info_report* msg)\n{\n 
int32_t trajectory_length = msg -> points.size();\n //msg.num_of_points\n if(trajectory_length < 10 || trajectory_length > 5000)\n {\n Logging::LogInfo(Logging::WARNING,\"mt_info_report message length error\");\n }\n\n NavPoints point;\n Trajectory trajectory;\n\n //GPS时间\n trajectory.gps_time_ = msg -> gps_time;\n //轨迹点数量\n trajectory.num_of_points_ = msg -> num_of_points;\n //轨迹点类型\n trajectory.type_ = msg -> type;\n //保留\n trajectory.reserved_ = msg -> reserved;\n trajectory.car_action_ = msg -> car_action;\n //模式 0 人工驾驶 1自动驾驶\n trajectory.driving_mode_ = msg -> driving_mode;\n for(auto it = msg->points.begin(); it != msg->points.end(); ++it)\n {\n point.p_x_ = it -> p_x;\n point.p_y_ = it -> p_y;\n\n point.p_h_ = it -> p_h;\n\n point.p_k_ = it -> p_k;\n point.p_k_ = it -> p_k;\n point.p_v_ = it -> p_v;\n point.p_a_ = it -> p_a;\n\n point.s_ = it -> s;\n point.p_g_ = it -> p_g;\n point.p_h_ = it -> p_h;\n point.p_k_ = it -> p_k;\n\n //cout<<\"tar_speed: \"<<it -> p_v<<endl;\n trajectory.points_.push_back(point);\n }\n\n control_logic_ -> SubscribeTrajectory(trajectory);\n usleep(10);\n}\n\nvoid LcmMessageManger::HandleMtBcmControlMessage(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan,\n const obu_lcm::mt_bcm_control_cmd* msg)\n{\n BcmControlCmd bcm_control_cmd;\n ///喇叭控制 0 禁声音 1 鸣笛\n bcm_control_cmd.speaker_control_ = msg -> speaker_control;\n ///远光灯 0 关闭 1 开启\n bcm_control_cmd.high_beam_ctrl_ = msg -> high_beam_ctrl;\n ///近光灯 0 关闭 1 开启\n bcm_control_cmd.low_beam_ctrl_ = msg -> low_beam_ctrl;\n ///左转向灯 0 关闭 1 开启\n bcm_control_cmd.left_turn_ctrl_ = msg -> left_turn_ctrl;\n ///右转向灯 0 关闭 1 开启\n bcm_control_cmd.right_turn_ctrl_ = msg -> right_turn_ctrl;\n ///前雨刮器 0 关闭 1 开启\n\tbcm_control_cmd.front_wiper_ctrl_ = msg -> front_wiper_ctrl;\n ///后雨刮器 0 关闭 1 开启\n bcm_control_cmd.rear_wiper_ctrl_ = msg -> rear_wiper_ctrl;\n ///位置灯 0 关闭 1 开启\n bcm_control_cmd.position_lamp_ctrl_ = msg -> position_lamp_ctrl;\n ///前雾灯 0 关闭 1 开启\n 
bcm_control_cmd.front_fog_lamp_ctrl_ = msg -> front_fog_lamp_ctrl;\n ///后雾灯 0 关闭 1 开启\n bcm_control_cmd.rear_fog_lamp_ctrl_ = msg -> rear_fog_lamp_ctrl;\n ///刹车灯 一般情况自动控制 0 关闭 1 开启\n bcm_control_cmd.brake_lamp_ctrl_ = msg -> brake_lamp_ctrl;\n ///警报灯 双闪 0 关闭 1 开启\n bcm_control_cmd.alarm_lamp_ctrl_ = msg -> alarm_lamp_ctrl;\n /// 左前门控制 0 关闭 1 开启\n bcm_control_cmd.lf_door_ctrl_ = msg -> lf_door_ctrl;\n /// 右前门控制 0 关闭 1 开启\n bcm_control_cmd.rf_door_ctrl_ = msg -> rf_door_ctrl;\n /// 左后门控制 0 关闭 1 开启\n bcm_control_cmd.lr_door_ctrl_ = msg -> lr_door_ctrl;\n /// 右后门控制 0 关闭 1 开启\n bcm_control_cmd.rr_door_ctrl_ = msg -> rr_door_ctrl;\n\n control_logic_ -> SubscribeBcmControl(bcm_control_cmd);\n usleep(10);\n}\n\nvoid LcmMessageManger::run()\n{\n while( 0 == lcm_->handle());\n}\n}\n}\n"
},
{
"alpha_fraction": 0.6647921800613403,
"alphanum_fraction": 0.6736099720001221,
"avg_line_length": 37.67824935913086,
"blob_id": "d62ec67d772b6fab1c10b3f7297774b60b98dc2d",
"content_id": "4cf351cfc688799cf9fc8a6a9f2df386b978f64d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 30189,
"license_type": "no_license",
"max_line_length": 337,
"num_lines": 777,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneArea.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "BIG5",
"text": "#ifndef _LANE_AREA_H_\n#define _LANE_AREA_H_\n\n#include \"../utils/type.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerPoint.h\"\n#include \"LaneMarkerPoints.h\"\n#include \"LaneMarkerLineSequence.h\"\n#include \"LaneMarkerLineSequences.h\"\n\n#include \"LaneParameter.h\"\n#include \"LaneParameterOneSide.h\"\n#include \"LaneRegion.h\"\n#include \"../utils/GridMap1D.h\"\n#include \"../utils/OutputInfo.h\"\n#include <iostream>\nusing namespace std;\n\nclass LaneArea\n{\nprivate:\n LaneRegion _Regions[NF_NUM];\n\n LaneMarkerLineSequences *_pLaneMerkerLineSequences[LR_NUM];\n LaneMarkerLineSequence *_pLaneMerkerLineSequence3D[LR_NUM];\n LaneMarkerLine *_pLaneMerkerLine3D[LR_NUM];\n\n LaneMarkerPoints *_pLaneBoundaryPointsToEstimateLaneParameter[LR_NUM];\t// &每?H?p?㏑???[?^??*豕〞p?足?G?b?W※_???X?g(???E㏑E)\n LaneMarkerPoints *_pLaneBoundaryPointsInFarArea[LR_NUM];\t\t\t\t\t// ㏑※??〞足??(*??邦???〞〞足???豕㏑※)?足?G?b?W※_???X?g(???E㏑E)\n LaneMarkerPoints *_pLaneBoundaryPointsInNearArea[LR_NUM];\t\t\t\t\t// ??每T〞足???足?G?b?W※_???X?g(???E㏑E)(?p?㏑???[?^?足???谷*T??)\n LaneMarkerPoints *_pLaneBoundaryPoints[LR_NUM];\t\t\t\t\t// ???邦???E※_???X?g(???E㏑E)(?p?㏑???[?^?足???谷*T??)\n\n LaneParameter *_pLaneParameter;\t\t\t\t\t\t\t\t// &每?H?p?㏑???[?^(〞?&∟???邦???E〞L)\n LaneParameterOneSide *_pLaneParameterOneSide[LR_NUM];\t\t// ?D&∟???邦???E?谷???谷&每?H?p?㏑???[?^(???E㏑E)\n\n double _adPlane3D[3];\t// a,c,d\t\t\t\t\t// ?H每那??每那?足?p?㏑???[?^\n double _adSurface3D[4];\t// alpha,cv,gamma,d\t\t\t// ?H每那???E?足?p?㏑???[?^\n\n int _iLaneBoundaryFoundCounter[LR_NUM];\n int _iLaneBoundaryLostCounter[LR_NUM];\n int _iLaneBoundaryType[LR_NUM][2];\n\n int _iLaneBoundarySolidCounter[LR_NUM];\n int _iLaneBoundaryDashCounter[LR_NUM];\n\n\n LaneMarkerPoints *_pEdgePointsOnRoad;\n int _iIsrcMin;\n int _iIsrcMax;\n int _iJsrcLeft_At_IsrcMin;\n int _iJsrcRight_At_IsrcMin;\n int _iJsrcLeft_At_IsrcMax;\n int _iJsrcRight_At_IsrcMax;\n\n double _dSearchMarginInNearArea;\n double 
_dSearchMarginInFarArea;\n\n double _dMaxDiffFromMedianDeviationInNearArea;\n double _dMaxDiffFromMedianDeviationInFarArea;\n\n int _iEdgeStrengthOfLaneBoundary[LR_NUM];\n BOOL _bSearchInDefaultLanePosition[LR_NUM];\n\n int _iBothSideLostCounter;\n int _iLaneParameterType;\n\n double _dPitchEstimated;\n double _dPitchPredicted;\n\n int *_piProcLine;\n int _iProcLineNumber;\n\n int _iLineNumberFarArea;\n int\t_iLineNumberNearArea;\n\n int _iIsrcAvailable[LR_NUM];\n\n FlexArray<int>\t*_faTopIntensity[LR_NUM];\n\n GridMap1D *_pGridMap1D;\n OutputInfo _OutputInfo;\n\n#ifdef\tUSE_EDGEPOINT2D\n LaneMarkerPoint **_ppEdgePoint2D;\n#endif\tUSE_EDGEPOINT2D\n\npublic:\n inline LaneArea(void)\n {\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _pLaneBoundaryPointsToEstimateLaneParameter[iLR] = NULL;\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _pLaneBoundaryPointsInFarArea[iLR] = NULL;\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _pLaneBoundaryPointsInNearArea[iLR] = NULL;\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _pLaneBoundaryPoints[iLR] = NULL;\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _pLaneMerkerLineSequences[iLR] = NULL;\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _pLaneMerkerLineSequence3D[iLR] = NULL;\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _pLaneMerkerLine3D[iLR] = NULL;\n }\n }\n\n _pLaneParameter = NULL;\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _pLaneParameterOneSide[iLR] = NULL;\n }\n }\n\n _adPlane3D[0] = 0.0;\n _adPlane3D[1] = 0.0;\n _adPlane3D[2] = 0.0;\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _iLaneBoundaryFoundCounter[iLR] = 0;\n _iLaneBoundaryLostCounter[iLR] = 0;\n _iLaneBoundarySolidCounter[iLR] = 0;\n _iLaneBoundaryDashCounter[iLR] = 0;\n\n }\n }\n _iLaneBoundaryType[LR_LEFT][0] = LBT_NONE;\n _iLaneBoundaryType[LR_LEFT][1] = LBT_NONE;\n _iLaneBoundaryType[LR_RIGHT][0] = LBT_NONE;\n _iLaneBoundaryType[LR_RIGHT][1] = LBT_NONE;\n\n _pEdgePointsOnRoad 
= NULL;\n\n _iIsrcMin = 0;\n _iIsrcMax = 0;\n _iJsrcLeft_At_IsrcMin = 0;\n _iJsrcRight_At_IsrcMin = 0;\n _iJsrcLeft_At_IsrcMax = 0;\n _iJsrcRight_At_IsrcMax = 0;\n\n _dSearchMarginInNearArea = DEFAULT_SEARCH_MARGIN_IN_NEAR_AREA;\n _dSearchMarginInFarArea = DEFAULT_SEARCH_MARGIN_IN_FAR_AREA;\n\n _dMaxDiffFromMedianDeviationInNearArea = DEFAULT_MAX_DIFF_FROM_MEDIAN_DEVIATION_IN_NEAR_AREA;\n _dMaxDiffFromMedianDeviationInFarArea = DEFAULT_MAX_DIFF_FROM_MEDIAN_DEVIATION_IN_FAR_AREA;\n\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _iEdgeStrengthOfLaneBoundary[iLR] = 0;\n _bSearchInDefaultLanePosition[iLR] = FALSE;\n _iIsrcAvailable[iLR] = 0;\n }\n _iBothSideLostCounter = 0;\n _iLaneParameterType = LPT_NONE;\n _dPitchEstimated = 0.;\n _dPitchPredicted = 0.;\n\n _piProcLine = NULL;\n _iProcLineNumber = 0;\n\n _iLineNumberFarArea = 0;\n _iLineNumberNearArea = 0;\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n _faTopIntensity[iLR] = new FlexArray<int>;\n }\n _pGridMap1D = new GridMap1D();\n#ifdef\tUSE_EDGEPOINT2D\n _ppEdgePoint2D = NULL;\n#endif\tUSE_EDGEPOINT2D\n }\n inline ~LaneArea(void)\n {\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneBoundaryPointsToEstimateLaneParameter[iLR]);\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneBoundaryPointsInFarArea[iLR]);\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneBoundaryPointsInNearArea[iLR]);\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneBoundaryPoints[iLR]);\n }\n }\n SAFE_DELETE(_pLaneParameter);\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneParameterOneSide[iLR]);\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneMerkerLineSequences[iLR]);\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneMerkerLineSequence3D[iLR]);\n }\n }\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneMerkerLine3D[iLR]);\n }\n }\n\n 
SAFE_DELETE(_pEdgePointsOnRoad);\n SAFE_DELETE(_piProcLine);\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_faTopIntensity[iLR]);\n }\n SAFE_DELETE(_pGridMap1D);\n#ifdef\tUSE_EDGEPOINT2D\n SAFE_DELETE_ARRAY(_ppEdgePoint2D);\n#endif\tUSE_EDGEPOINT2D\n }\n inline void initialize(void)\n {\n for(int iNF = 0; iNF < NF_NUM; iNF++)\n {\n Region(iNF)->initialize();\n }\n getLaneParameter()->initialize();\n }\n inline void initialize(PARAM_CAM *pCamParam)\n {\n for(int iNF = 0; iNF < NF_NUM; iNF++)\n {\n Region(iNF)->initialize();\n }\n setupVotingThreshold();\n SAFE_DELETE(_pLaneParameter);\n _pLaneParameter = new LaneParameter(pCamParam);\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n SAFE_DELETE(_pLaneParameterOneSide[iLR]);\n int iK = iLR == LR_LEFT ? -1 : 1;\n _pLaneParameterOneSide[iLR] = new LaneParameterOneSide(pCamParam, iK);\n }\n }\n }\n inline void setupVotingThreshold(void)\n {\n for(int iNF = 0; iNF < NF_NUM; iNF++)\n {\n for(int iLR = 0; iLR < LR_NUM; iLR++)\n {\n for(int iUD = 0; iUD < UD_NUM; iUD++)\n {\n#ifndef\tFOR_TME\n int iVT = Region(iNF)->LineNumber() / VOTING_THRESHOLD_RATIO;\n//\t\t\t\t\tint iVT = Region(iNF)->LineNumber() / 10;\t// 20111020\n#else\tFOR_TME\n int iVT = Region(iNF)->LineNumber() / 10;\n#endif\tFOR_TME\n Region(iNF)->Side(iLR)->BB(iUD)->VotingThreshold(iVT);\n }\n }\n }\n }\n inline LaneRegion *Region(int iNF)\n {\n return &(_Regions[iNF]);\n }\n\n inline LaneParameter *getLaneParameter(void)\n {\n return _pLaneParameter;\n }\n inline LaneParameterOneSide *getLaneParameterOneSide(int iLR)\n {\n return _pLaneParameterOneSide[iLR];\n }\n\n inline void setLaneBoundaryPointsToEstimateLaneParameter(int iLR, LaneMarkerPoints *p)\n {\n SAFE_DELETE(_pLaneBoundaryPointsToEstimateLaneParameter[iLR]);\n _pLaneBoundaryPointsToEstimateLaneParameter[iLR] = p;\n }\n inline LaneMarkerPoints *getLaneBoundaryPointsToEstimateLaneParameter(int iLR)\n {\n return _pLaneBoundaryPointsToEstimateLaneParameter[iLR];\n }\n inline void 
setLaneBoundaryPointsInFarArea(int iLR, LaneMarkerPoints *p)\n {\n SAFE_DELETE(_pLaneBoundaryPointsInFarArea[iLR]);\n _pLaneBoundaryPointsInFarArea[iLR] = p;\n }\n inline void setLaneBoundaryPointsInNearArea(int iLR, LaneMarkerPoints *p)\n {\n SAFE_DELETE(_pLaneBoundaryPointsInNearArea[iLR]);\n _pLaneBoundaryPointsInNearArea[iLR] = p;\n }\n inline void setLaneBoundaryPoints(int iLR, LaneMarkerPoints *p)\n {\n SAFE_DELETE(_pLaneBoundaryPoints[iLR]);\n _pLaneBoundaryPoints[iLR] = p;\n }\n\n inline LaneMarkerPoints *getLaneBoundaryPointsInNearArea(int iLR)\n {\n return _pLaneBoundaryPointsInNearArea[iLR];\n }\n inline LaneMarkerPoints *getLaneBoundaryPointsInFarArea(int iLR)\n {\n return _pLaneBoundaryPointsInFarArea[iLR];\n }\n inline LaneMarkerPoints *getLaneBoundaryPoints(int iLR)\n {\n return _pLaneBoundaryPoints[iLR];\n }\n inline void setLaneMarkerLineSequences(int iLR, LaneMarkerLineSequences *p)\n {\n SAFE_DELETE(_pLaneMerkerLineSequences[iLR]);\n _pLaneMerkerLineSequences[iLR] = p;\n }\n inline LaneMarkerLineSequences *getLaneMarkerLineSequences(int iLR)\n {\n return _pLaneMerkerLineSequences[iLR];\n }\n inline void setLaneMarkerLineSequence3D(int iLR, LaneMarkerLineSequence *p)\n {\n SAFE_DELETE(_pLaneMerkerLineSequence3D[iLR]);\n _pLaneMerkerLineSequence3D[iLR] = p;\n }\n inline LaneMarkerLineSequence *getLaneMarkerLineSequence3D(int iLR)\n {\n return\t_pLaneMerkerLineSequence3D[iLR];\n }\n inline void setLaneMarkerLine3D(int iLR, LaneMarkerLine *p)\n {\n SAFE_DELETE(_pLaneMerkerLine3D[iLR]);\n _pLaneMerkerLine3D[iLR] = p;\n }\n inline LaneMarkerLine *getLaneMarkerLine3D(int iLR)\n {\n return\t_pLaneMerkerLine3D[iLR];\n }\n inline LaneMarkerPoints *getEdgePointsOnRoad(void)\n {\n return _pEdgePointsOnRoad;\n }\n inline void setEdgePointsOnRoad(LaneMarkerPoints *p)\n {\n SAFE_DELETE(_pEdgePointsOnRoad);\n _pEdgePointsOnRoad = p;\n }\n\n void detectLaneMarkerPointsOneLine(PARAM_CAM *pCamParam, int iIsrc, int iProcLineIndex, Uchar *pImageAll, LaneMarkerPoints 
*pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity);\n BOOL detectLaneMarkerPoints(PARAM_CAM *pCamParam, Uchar *pImage, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity = NULL);\n BOOL detectLaneMarkers(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Uchar *pucInputImage);\n BOOL selectLaneMarkers(void);\n BOOL selectLaneMarkers(LaneParameter *pLaneParameter, LaneParameterOneSide *pLeftLaneParameter, LaneParameterOneSide *pRightLaneParameter);\n void detectLaneMarkerPointsOneLine(PARAM_CAM *pCamParam,int iIsrc,int iProcLineIndex,Uchar *pImageAll,LaneMarkerPoints *pUpEdgePoints,LaneMarkerPoints *pDownEdgePoints,int iPositiveThresholdLeft,int iNegativeThresholdLeft,int iPositiveThresholdRight,int iNegativeThresholdRight,Disparity *pDisparity,LaneMarkerPoint **ppEdgePoint2D);\n LaneMarker *searchLaneMarkerForLaneBoundaryByVotes(LaneMarkers *pLMs, int iUD, int iOffsetCenter, int iOffsetMin, int iOffsetMax, int iYawCenter, int iYawMin, int iYawMax);\n LaneMarkerLineSequence *searchLaneMarkerLineSequenceForLaneBoundaryByVotes(LaneMarkerLineSequences *pLMLSs, int iUD, int iBottomXCenter, int iBottomXMin, int iBottomXMax, int iYawCenter, int iYawMin, int iYawMax);\n LaneMarkerLineSequence *searchLaneMarkerLineSequenceForLaneBoundary2(LaneMarkerLineSequences *pLMLSs, int iUD,int iNearBottomXCenter, int iNearBottomXMin, int iNearBottomXMax,int iNearTopXCenter, int iNearTopXMin, int iNearTopXMax);\n LaneMarkerLineSequence *searchLaneMarkerLineSequenceForLaneBoundaryByVotes2(LaneMarkerLineSequences *pLMLSs, int iUD,int iNearBottomXCenter, int iNearBottomXMin, int iNearBottomXMax,int iNearTopXCenter, int iNearTopXMin, int iNearTopXMax);\n LaneMarkerLine *searchLaneMarkerLineForLaneBoundary2(LaneMarkerLines *pLMLs,int iBottomXCenter, int iBottomXMin, int iBottomXMax,int iTopXCenter, int iTopXMin, int iTopXMax);\n double searchMedian(FlexArray<double> *pfadDeviations);\n 
LaneMarker *searchLaneMarkerForLaneBoundary(LaneMarkers *pLMs,int iUD,int iOffsetCenter,int iOffsetMin,int iOffsetMax,int iYawCenter,int iYawMin,int iYawMax);\n LaneMarkerLine *searchLaneMarkerLineForLaneBoundary(LaneMarkerLines *pLMLs, int iOffsetCenter, int iOffsetMin, int iOffsetMax, int iYawCenter, int iYawMin, int iYawMax);\n LaneMarker *searchLaneMarkerForLaneBoundaryByVotes(LaneMarkers *pLMs,int iUD,int iBottomXCenter,int iBottomXMin,int iBottomXMax,int iTopZ,int iTopXCenter,int iTopXMin,int iTopXMax);\n int calcAverageOfHighIntensityArea(PARAM_CAM *pCamParam, int iLR, Uchar *pInputImage, int iIsrc);\n\n\n double calcYawDiffNearAndFar(int iNF0, int iNF1);\n double calcYawDiffNearAndFar(int iLR, int iNF0, int iNF1);\n int calcNewThOfEdgeStrength(LaneMarkerPoints *pLMPs);\n int calcEdgeStrength(PARAM_CAM *pCamParam, int iIsrc, Uchar *pImage, int iJsrc);\n LaneMarkerLineSequence *searchLaneMarkerLineSequenceForLaneBoundary(LaneMarkerLineSequences *pLMLSs,int iUD,int iBottomXCenter,int iBottomXMin,int iBottomXMax, int iYawCenter, int iYawMin, int iYawMax);\n LaneMarker *searchLaneMarkerForLaneBoundaryByInitialParameter(int iNF, int iLR);\t\t\t\t\t\t\t// 01\n LaneMarkerLineSequence *searchLaneMarkerLineSequenceForLaneBoundaryByInitialParameter(int iLR);\t\t\t\t// 02\n LaneMarker *searchLaneMarkerForLaneBoundaryByNearerRegion(int iNF, int iLR);\t\t\t\t\t\t\t\t// 03\n LaneMarker *searchLaneMarkerForLaneBoundaryByPreviousFrame(int iNF, int iLR);\t\t\t\t\t\t\t\t// 04\n LaneMarker *searchLaneMarkerForLaneBoundaryByPreviousFurtherFrame(int iNF, int iLR);\n LaneMarkerLineSequence *searchLaneMarkerLineSequenceForLaneBoundaryByPreviousFrame(int iLR);\t\t\t\t// 05\n LaneMarker *searchLaneMarkerForLaneBoundaryByParameter(int iNF, int iLR);\t\t\t\t\t\t\t\t\t// 06\n LaneMarker *searchLaneMarkerForLaneBoundaryByNearerRegionAndParameter(int iNF, int iLR);\t\t\t\t\t// 07\n LaneMarkerLineSequence *searchLaneMarkerLineSequenceForLaneBoundaryByParameter(int iLR);\t\t\t\t\t// 08\n 
LaneMarkerLine *searchLaneMarkerLineForLaneBoundaryByParameter(int iNF, int iLR, int iUD);\t\t\t\t\t// 09\n LaneMarkerLine *searchLaneMarkerLineForLaneBoundaryByNearerRegionAndParameter(int iNF, int iLR, int iUD);\t// 10\n\n BOOL pickupLaneBoundaryPoints(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints);\n//\tLaneMarkerPoints *searchLaneBoundaryPointsInFarArea(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity = NULL);\n BOOL eliminateIsoletedBoundaryPointsInFarArea(PARAM_CAM *pCamParam, LaneMarkerPoints *pLMPs);\n BOOL eliminateIsoletedBoundaryPointsInFarArea(PARAM_CAM *pCamParam);\n BOOL eliminateIsoletedBoundaryPointsInNearArea(PARAM_CAM *pCamParam);\n LaneMarkerPoints *searchLaneBoundaryPointsInFarArea(PARAM_CAM *pCamParam, int iLR, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity, double dZMax = -1.);\n LaneMarkerPoints *searchLaneBoundaryPointsInFarAreaForSolidLine(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity = NULL);\n LaneMarkerPoints *searchLaneBoundaryPointsInFarAreaForSolidLine(PARAM_CAM *pCamParam, int iLR, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity = NULL);\n LaneMarkerPoints *searchLaneBoundaryPointsInFarAreaForSolidLine(PARAM_CAM *pCamParam, int iLR, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity = NULL, double dZMax = -1.);\n BOOL searchLaneBoundaryPointsInNearArea(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity = NULL);\n BOOL searchLaneBoundaryPointsInNearArea(PARAM_CAM *pCamParam, int iLR, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, Disparity *pDisparity = NULL);\n BOOL selectLaneBoundaryPointsToEstimateLaneParameter(PARAM_CAM *pCamParam);\n BOOL 
selectLaneBoundaryPointsToEstimateLaneParameterWithFarArea(PARAM_CAM *pCamParam);\n BOOL estimateLaneParameter(PARAM_CAM *pCamParam);\n BOOL decideDetectionStatus(PARAM_CAM *pCamParam);\n BOOL checkAndAdjustLaneParameters(void);\n BOOL decideLaneMarkerType(PARAM_CAM *pCamParam);\n BOOL checkLaneBoundaryDetectionStatus(PARAM_CAM *pCamParam);\n\n BOOL calc3DLineOfLaneBoundaries(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints);\n BOOL calc3DLineOfLaneMarkers(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints);\n BOOL calc3DLineOfLaneMarkerLines(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints);\n BOOL calcAverageEdgeStrengthOfLaneMarkerLines(void);\n BOOL eliminateShortLaneMarkerLines(PARAM_CAM *pCamParam);\n BOOL eliminateFewPointLaneMarkerLines(PARAM_CAM *pCamParam);\n BOOL estimate3Dplane(void);\n BOOL estimate3DSurface(LaneMarkerPoints *pLMPs);\n BOOL estimate3DSurface(void);\n BOOL pickupLaneBoundaryPointsOnRoadRegion(PARAM_CAM *pCamParam, LaneMarkerPoints *pEdgePointsOnRoad);\n BOOL pickupLaneMarkerPointsOnRoadRegion(PARAM_CAM *pCamParam, LaneMarkerPoints *pUpEdgePoints, LaneMarkerPoints *pDownEdgePoints, LaneMarkerPoints *pEdgePointsOnRoad);\n inline int getIsrcMin(void)\n {\n return _iIsrcMin;\n }\n inline int getIsrcMax(void)\n {\n return _iIsrcMax;\n }\n inline int getJsrcLeft_At_IsrcMin(void)\n {\n return _iJsrcLeft_At_IsrcMin;\n }\n inline int getJsrcRight_At_IsrcMin(void)\n {\n return _iJsrcRight_At_IsrcMin;\n }\n inline int getJsrcLeft_At_IsrcMax(void)\n {\n return _iJsrcLeft_At_IsrcMax;\n }\n inline int getJsrcRight_At_IsrcMax(void)\n {\n return _iJsrcRight_At_IsrcMax;\n }\n\n BOOL adjustEdgeThreshold(void);\n inline double *getPlane3D(void)\n {\n return _adPlane3D;\n }\n inline double *getSurface3D(void)\n {\n return _adSurface3D;\n }\n\n inline int LaneBoundaryFoundCounter(int iLR)\n {\n return\t_iLaneBoundaryFoundCounter[iLR];\n }\n 
inline int LaneBoundaryLostCounter(int iLR)\n {\n return\t_iLaneBoundaryLostCounter[iLR];\n }\n inline void clearLaneBoundaryFoundCounter(int iLR)\n {\n _iLaneBoundaryFoundCounter[iLR] = 0;\n }\n inline void clearLaneBoundaryLostCounter(int iLR)\n {\n _iLaneBoundaryLostCounter[iLR] = 0;\n }\n inline void incLaneBoundaryFoundCounter(int iLR)\n {\n _iLaneBoundaryFoundCounter[iLR]++;\n }\n inline void incLaneBoundaryLostCounter(int iLR)\n {\n _iLaneBoundaryLostCounter[iLR]++;\n }\n inline int LaneBoundarySolidCounter(int iLR)\n {\n return\t_iLaneBoundarySolidCounter[iLR];\n }\n inline int LaneBoundaryDashCounter(int iLR)\n {\n return\t_iLaneBoundaryDashCounter[iLR];\n }\n inline void clearLaneBoundarySolidCounter(int iLR)\n {\n _iLaneBoundarySolidCounter[iLR] = 0;\n }\n inline void clearLaneBoundaryDashCounter(int iLR)\n {\n _iLaneBoundaryDashCounter[iLR] = 0;\n }\n inline void incLaneBoundarySolidCounter(int iLR)\n {\n _iLaneBoundarySolidCounter[iLR]++;\n }\n inline void incLaneBoundaryDashCounter(int iLR)\n {\n _iLaneBoundaryDashCounter[iLR]++;\n }\n\n inline int LaneBoundaryType(int iLR, int iIdx)\n {\n return _iLaneBoundaryType[iLR][iIdx];\n }\n inline void LaneBoundaryType(int iLR, int iIdx, int iV)\n {\n _iLaneBoundaryType[iLR][iIdx] = iV;\n }\n\n\n inline double SearchMarginInNearArea(void)\n {\n return _dSearchMarginInNearArea;\n }\n inline void SearchMarginInNearArea(double v)\n {\n _dSearchMarginInNearArea = v;\n }\n inline double SearchMarginInFarArea(void)\n {\n return _dSearchMarginInFarArea;\n }\n inline void SearchMarginInFarArea(double v)\n {\n _dSearchMarginInFarArea = v;\n }\n inline double MaxDiffFromMedianDeviationInNearArea(void)\n {\n return \t_dMaxDiffFromMedianDeviationInNearArea;\n }\n inline void MaxDiffFromMedianDeviationInNearArea(double v)\n {\n _dMaxDiffFromMedianDeviationInNearArea = v;\n }\n inline double MaxDiffFromMedianDeviationInFarArea(void)\n {\n return \t_dMaxDiffFromMedianDeviationInFarArea;\n }\n inline void 
MaxDiffFromMedianDeviationInFarArea(double v)\n {\n _dMaxDiffFromMedianDeviationInFarArea = v;\n }\n\n\n BOOL detectLaneMarkerLineSequences(PARAM_CAM *pCamParam);\n BOOL checkLaneMarkerLineSequenceOn3DPlaneBoundaryByPointNumber(PARAM_CAM *pCamParam, Disparity *pDisparity, double *pdPlane3D, int iLR, LaneMarkerLineSequence *pLMLS);\n BOOL checkLaneMarkerLineSequenceOn3DPlaneBoundaryByAverageHeight(PARAM_CAM *pCamParam, Disparity *pDisparity, double *pdPlane3D, int iLR, LaneMarkerLineSequence *pLMLS);\n BOOL checkLaneMarkerLineSequenceOn3DPlaneBoundary(PARAM_CAM *pCamParam, Disparity *pDisparity, double *pdPlane3D, LaneMarkerLineSequence *pLMLS);\n BOOL checkLaneMarkerLineSequencesOn3DPlaneBoundary(PARAM_CAM *pCamParam, Disparity *pDisparity);\n BOOL checkLaneMarkerLinesOn3DPlaneBoundary(PARAM_CAM *pCamParam, Disparity *pDisparity);\n BOOL checkLaneMarkerLineSequenceOn3DPlaneBoundaryByPointsOnLine(PARAM_CAM *pCamParam, Disparity *pDisparity, double *pdPlane3D, int iLR, LaneMarkerLineSequence *pLMLS);\n BOOL checkLaneMarkerLineOn3DPlaneBoundaryByPointsOnLine(PARAM_CAM *pCamParam, Disparity *pDisparity, double *pdPlane3D, int iLR, LaneMarkerLine *pLML);\n\n BOOL checkLaneMarkerLineOn3DPlaneBoundaryByPointsOnLine(PARAM_CAM *pCamParam, Disparity *pDisparity);\n//\tBOOL checkLaneMarkerLineOn3DPlaneBoundaryByPointsOnLine(PARAM_CAM *pCamParam, Disparity *pDisparity, double *pdPlane3D, int iLR, LaneMarkerLine *pLML);\n LaneMarkerLineSequence *searchLaneMarkerLineSequenceOn3DPlaneBoundary(PARAM_CAM *pCamParam, int iLR, LaneMarkerLine *pLML, LaneMarkerLineSequences *pLMLSs);\n LaneMarkerLine *searchLaneMarkerLineOn3DPlaneBoundary(PARAM_CAM *pCamParam, int iLR, LaneMarkerLine *pLML, LaneMarkerLines *pLMLUps, LaneMarkerLines *pLMLDowns);\n LaneMarkerLine *searchLaneMarkerLineOn3DPlaneBoundary(PARAM_CAM *pCamParam, int iLR, LaneMarkerLine *pLML, LaneMarkerLine *pLMLOutSide, LaneMarkerLines *pLMLUps, LaneMarkerLines *pLMLDowns);\n BOOL 
searchLaneMarkerLineSequenceOn3DPlaneBoundary(PARAM_CAM *pCamParam);\n BOOL searchLaneMarkerLineOn3DPlaneBoundary(PARAM_CAM *pCamParam);\n\n inline int EdgeStrengthOfLaneBoundary(int iLR)\n {\n return _iEdgeStrengthOfLaneBoundary[iLR];\n }\n inline void EdgeStrengthOfLaneBoundary(int iLR, int iV)\n {\n _iEdgeStrengthOfLaneBoundary[iLR] = iV;\n }\n inline BOOL SearchInDefaultLanePositionFlag(int iLR)\n {\n return _bSearchInDefaultLanePosition[iLR];\n }\n inline void SearchInDefaultLanePositionFlag(int iLR, BOOL bV)\n {\n _bSearchInDefaultLanePosition[iLR] = bV;\n }\n\n inline int IsrcAvailable(int iLR)\n {\n return _iIsrcAvailable[iLR];\n }\n inline void IsrcAvailable(int iLR, int iV)\n {\n _iIsrcAvailable[iLR] = iV;\n }\n\n inline int getBothSideLostCounter(void)\n {\n return _iBothSideLostCounter;\n }\n inline void setBothSideLostCounter(int iV)\n {\n _iBothSideLostCounter = iV;\n }\n inline void incBothSideLostCounter(void)\n {\n _iBothSideLostCounter++;\n }\n inline void clearBothSideLostCounter(void)\n {\n _iBothSideLostCounter = 0;\n }\n\n\n inline int getLaneParameterType(void)\n {\n return _iLaneParameterType;\n }\n inline void setLaneParameterType(int iV)\n {\n _iLaneParameterType = iV;\n }\n\n inline double getPitchEstimated(void)\n {\n return _dPitchEstimated;\n }\n inline void setPitchEstimated(double dV)\n {\n _dPitchEstimated = dV;\n }\n inline double getPitchPredicted(void)\n {\n return _dPitchPredicted;\n }\n inline void setPitchPredicted(double dV)\n {\n _dPitchPredicted = dV;\n }\n\n\n inline int ProcLineNumber(void)\n {\n return _iProcLineNumber;\n }\n inline void ProcLineNumber(int iV)\n {\n _iProcLineNumber = iV;\n }\n inline void ProcLine(int *p)\n {\n SAFE_DELETE(_piProcLine);\n _piProcLine = p;\n }\n inline int *ProcLine(void)\n {\n return _piProcLine;\n }\n//\tBOOL calcProcLine(PARAM_CAM *pCamParam, int iMaxLineNumberOfRegion0, int iMaxLineNumberOfRegion1);\n BOOL calcProcLine(PARAM_CAM *pCamParam, int iIstepNearArea, int 
iMaxLineNumberRegion0, int iMaxLineNumberRegion1, int iIstepFarArea);\n//\tBOOL calcLineNumber(PARAM_CAM *pCamParam);\n\n inline int LineNumberFarArea(void)\n {\n return \t_iLineNumberFarArea;\n }\n inline void LineNumberFarArea(int iV)\n {\n _iLineNumberFarArea = iV;\n }\n inline int LineNumberNearArea(void)\n {\n return \t_iLineNumberNearArea;\n }\n inline void LineNumberNearArea(int iV)\n {\n _iLineNumberNearArea = iV;\n }\n\n LaneMarkerPoints *searchLaneMarkerPointsByLaneParameter(PARAM_CAM *pCamParam, LaneParameterOneSide *pLaneParameter, Disparity *pDisparity, double dXVehicle, double dZVehicleMin, double dZVehicleMax);\n BOOL calc3DplaneByLaneParameter(PARAM_CAM *pCamParam, LaneParameterOneSide *pLaneParameter, Disparity *pDisparity, double dXVehicleDiff, double dZVehicleMin, double dZVehicleMax, double *dPlane3D);\n BOOL calc3DplaneByLaneParameter(PARAM_CAM *pCamParam, int iK, LaneParameterOneSide *pLaneParameter, Disparity *pDisparity);\n\n BOOL checkMismatchOfLaneMarkerPointsFromLaneParameter(PARAM_CAM *pCamParam, LaneParameterOneSide *pLaneParameterOneSide, LaneMarkerPoints *pLaneMarkerPoints, LaneMarkerPoints *pNewLaneMarkerPoints);\n\n BOOL removeMismatchedDistanceLaneBoundaryPoints(PARAM_CAM *pCamParam, int iLR);\n BOOL removeIsolatedLaneBoundaryPoints(PARAM_CAM *pCamParam, int iLR);\n BOOL removeIsolatedLaneBoundaryPoints_2(PARAM_CAM *pCamParam, int iLR);\n\n inline void addTopIntensity(int iLR, int iV)\n {\n if(_faTopIntensity[iLR]->getNumber() >= 10)\n {\n _faTopIntensity[iLR]->remove(0);\n }\n _faTopIntensity[iLR]->add(iV);\n }\n inline int getTopIntensity(int iLR)\n {\n int iIdxMax = -1;\n int iVMax = -1;\n for(int iIdx = 0; iIdx < _faTopIntensity[iLR]->getNumber(); iIdx++)\n {\n int iV = _faTopIntensity[iLR]->get(iIdx);\n if(iVMax < iV)\n {\n iVMax = iV;\n iIdxMax = iIdx;\n }\n }\n return iVMax;\n }\n inline GridMap1D *getGridMap1D(void)\n {\n return _pGridMap1D;\n }\n BOOL voteToGridMap(PARAM_CAM *pCamParam, Disparity *pDisparity, int iIsrcMin, 
int iIsrcMax, double *pdOffset);\n inline OutputInfo *getOutputInfo(void)\n {\n return &_OutputInfo;\n }\n inline void SetOutputInfo(void)\n {\n LaneParameterOneSide *pLPLeft = getLaneParameterOneSide(LR_LEFT);\n LaneParameterOneSide *pLPRight = getLaneParameterOneSide(LR_RIGHT);\n getOutputInfo()->Pitch(atan(getPlane3D()[1]) / M_PI * 180.);\n getOutputInfo()->Cv(getSurface3D()[1]);\n getOutputInfo()->LeftOffset(pLPLeft->Param(LPID_OFFSET) / 1000.);\n getOutputInfo()->LeftYaw(pLPLeft->Param(LPID_YAW) / M_PI * 180.);\n getOutputInfo()->LeftC0(pLPLeft->Param(LPID_CURVATURE) * 1000.);\n getOutputInfo()->LeftC1(pLPLeft->Param(LPID_CURVATUREDASH) * 1000. * 1000.);\n getOutputInfo()->LeftAvailableDistance(pLPLeft->getDetectedDistance() / 1000.);\n getOutputInfo()->LeftCurbOffset(pLPLeft->getDeltaOffsetCurb() / 1000 + pLPLeft->Param(LPID_OFFSET) / 1000.);\n getOutputInfo()->RightOffset(pLPRight->Param(LPID_OFFSET) / 1000.);\n getOutputInfo()->RightYaw(pLPRight->Param(LPID_YAW) / M_PI * 180.);\n getOutputInfo()->RightC0(pLPRight->Param(LPID_CURVATURE) * 1000.);\n getOutputInfo()->RightC1(pLPRight->Param(LPID_CURVATUREDASH) * 1000. * 1000.);\n getOutputInfo()->RightAvailableDistance(pLPRight->getDetectedDistance() / 1000.);\n getOutputInfo()->RightCurbOffset(pLPRight->getDeltaOffsetCurb() / 1000. + pLPRight->Param(LPID_OFFSET) / 1000.);\n }\n\n};\n#endif // _LANE_AREA_H_\n"
},
{
"alpha_fraction": 0.6538461446762085,
"alphanum_fraction": 0.6538461446762085,
"avg_line_length": 51,
"blob_id": "9ebfc97c9aad5fcb12128e8ca56baf9c99dc68b9",
"content_id": "8ba76bdce99f8bc60c623491594f65854c1102bf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 52,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 1,
"path": "/athena/core/x86/Control/include/script.sh",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "gnome-terminal -e 'bash -c \"./cplib.sh; exec bash\"'\n"
},
{
"alpha_fraction": 0.6645161509513855,
"alphanum_fraction": 0.6764976978302002,
"avg_line_length": 20,
"blob_id": "064670cf349160d3af0dab0f5cae214a04c00927",
"content_id": "ea66c8cda5c9e7e4504278f8d01e9f1b612898c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3255,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 155,
"path": "/athena/examples/ROS/src/DataRecording/rtk_inertial/src/lcm2ros_gps.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "//ROS\n#include <ros/ros.h>\n#include <tf/transform_datatypes.h>\n#include <rtk_inertial/Gps.h>\n#include <geometry_msgs/PoseStamped.h>\n#include <ros/time.h>\n\n\n//LCM\n#include <lcm/lcm-cpp.hpp>\n#include \"ins_info.hpp\"\n\n\n\n//Boost\n #include <boost/thread/thread.hpp> \n#include <boost/foreach.hpp>\n\n\n#include \"Commons/transfer.hpp\"\n\n\n#define ORIGIN_LAT 30.45814058804\t//wuhan\n#define ORIGIN_LON 114.31801222674\n\n\nclass LcmHandle\n{\npublic:\n\tLcmHandle(const ros::NodeHandle& nh);\n\n\n\nprotected:\n\n\tvoid onGpsCallback(const rtk_inertial::Gps::ConstPtr& msg);\n\n\tvoid handleMessage(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, \n const obu_lcm::ins_info* msg);\n\n\n\n\tvoid start()\n\t{\n\t\tm_gps_lcm.subscribe(\"ins_info\", &LcmHandle::handleMessage, this);\n\t\twhile(0 == m_gps_lcm.handle());\n\t}\n\n\n\nprivate:\n\tros::NodeHandle m_nh;\n\tros::Publisher m_pubGps;\n\tros::Publisher m_pubPoseStamped;\n\n\tros::Subscriber m_subGps;\n\n\tlcm::LCM m_gps_lcm;\n\trtk_inertial::Gps m_gps;\n\n\tboost::thread m_queueThread;\n};\n\n\nvoid LcmHandle::handleMessage(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, \n const obu_lcm::ins_info* msg)\n {\n\t\t//m_gps.header.stamp = ros::Time::now();\n\t\t//m_gps.header.frame_id = \"gps\";\n\t\tm_gps.gps_time = msg->gps_time;\n\t\tm_gps.week= msg->week;\n\t\tm_gps.lat= msg->lat;\n\t\tm_gps.lon= msg->lon;\n\t\tm_gps.height= msg->height;\n\t\tm_gps.lateral_speed= msg->lateral_speed;\n\t\tm_gps.longitudinal_speed= msg->longitudinal_speed;\n\t\tm_gps.down_speed= msg->down_speed;\n\t\tm_gps.roll= msg->roll;\n\t\tm_gps.pitch= msg->pitch;\n\t\tm_gps.heading= msg->heading;\n\t\tm_gps.lateral_accelerate= msg->lateral_accelerate;\n\t\tm_gps.longitudinal_accelerate= msg->longitudinal_accelerate;\n\t\tm_gps.down_accelerate= msg->down_accelerate;\n\t\tm_gps.roll_speed= msg->roll_speed;\n\t\tm_gps.pitch_speed= msg->pitch_speed;\n\t\tm_gps.heading_speed= msg->heading_speed;\n\t\tm_gps.flag= 
msg->flag;\n\t\tm_gps.n= msg->n;\n\n\t\tm_pubGps.publish(m_gps);\n\n }\n\n\nvoid LcmHandle::onGpsCallback(const rtk_inertial::Gps::ConstPtr& msg)\n{\n\tgeometry_msgs::PoseStamped poseStamped;\n\tposeStamped.header.stamp = ros::Time(msg->gps_time);\n\tposeStamped.header.frame_id = \"gps\";\n\n\tdouble x=0.0, y=0.0;\n\tcoord_transfer transfer(ORIGIN_LAT,ORIGIN_LON);\n\ttransfer.cs.ll2xy(msg->lat, msg->lon, x, y);\n\t\n\tposeStamped.pose.position.x = x;\n\tposeStamped.pose.position.y = y;\n\tposeStamped.pose.position.z = msg->height;\n\n\tgeometry_msgs::Quaternion odom_quat = tf::createQuaternionMsgFromRollPitchYaw(msg->roll, msg->pitch, msg->heading);\n\n\n\tposeStamped.pose.orientation = odom_quat;\n\n\t \n\tm_pubPoseStamped.publish(poseStamped);\n}\n\n\n\n\nLcmHandle::LcmHandle(const ros::NodeHandle& nh)\n\t:m_nh(nh)\n{\n\tm_pubGps = m_nh.advertise<rtk_inertial::Gps>(\"gps\",1);\n\tm_pubPoseStamped = m_nh.advertise<geometry_msgs::PoseStamped>(\"poseStamped\",1);\n\n\n\tm_subGps = m_nh.subscribe<rtk_inertial::Gps>(\"gps\",1,&LcmHandle::onGpsCallback,this);\n\n\tif(!m_gps_lcm.good())\n \treturn;\n\n \t\n\tm_queueThread = boost::thread(boost::bind(&LcmHandle::start,this));\n\t\n}\n\n\nint main(int argc, char** argv)\n{\n\tros::init(argc, argv,\"rtk_inertial_node\");\n\tros::NodeHandle nh;\n\n\tLcmHandle lcmHandle(nh);\n\n\t/*ros::Rate rate(10);\n\twhile(ros::ok())\n\t{\n\t\trate.sleep(\n\t}*/\n\tros::spin();\n\treturn 0;\n}\n"
},
{
"alpha_fraction": 0.49474474787712097,
"alphanum_fraction": 0.5195195078849792,
"avg_line_length": 21.200000762939453,
"blob_id": "56fa11f422cf449c5b5d9cea1b19f6d964c26bc6",
"content_id": "f688d8887743d7d1c2471d45d461ed6d5dae3711",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1466,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 60,
"path": "/athena/examples/LCM/Singlecar/control/common/enum.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file enum.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef APPS_ENUM_H_\n#define APPS_ENUM_H_\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\ntypedef enum{\n LCM = 1, /**< LCM消息*/\n}MessageType;\n\ntypedef enum{\n CS55 = 1, /**< LCM消息*/\n TRUCK_J6P = 2,\n}VehicleType;\n\ntypedef enum{\n INVALID = 0, /**< 无效*/\n HUMAN_DRIVING_MODE = 1, /**< 人工驾驶*/\n AUTO_DRIVING_MODE = 3, /**< 自动驾驶*/\n}DrivingMode;\n\ntypedef enum{\n CONTROLLABLE = 1, /**< 可控但处于非控制状态*/\n CONTROLLED = 2, /**< 正在控制中*/\n UNCONTROLLABLE = 3, /**< 不可控*/\n}DrivingModeFeedBack;\n\ntypedef enum{\n POSITION_P = 0, /**< P档位*/\n POSITION_R = 1, /**< R档位*/\n POSITION_N = 2, /**< N档位*/\n POSITION_D = 3, /**< D档位*/\n}GearLevel;\n\ntypedef enum{\n POSITION_P_FEEDBACK = 0, /**< P档位反馈*/\n POSITION_R_FEEDBACK = 1, /**< R档位反馈*/\n POSITION_N_FEEDBACK = 2, /**< N档位反馈*/\n POSITION_D_FEEDBACK = 3, /**< D档位反馈*/\n POSITION_SHIFTING_FEEDBACK = 4, /**< 档位切换过程中*/\n POSITION_INVALID_FEEDBACK = 5, /**< 无效档位*/\n}GearLevelFeedBack;\n\n}//namespace control\n}//namespace athena\n\n#endif //APPS_ENUM_H_\n"
},
{
"alpha_fraction": 0.7372116446495056,
"alphanum_fraction": 0.7372116446495056,
"avg_line_length": 28.323530197143555,
"blob_id": "083a467543dfa17c87f40f6ed12cfc2e6f55ab3c",
"content_id": "af019debec42a3f3ef4a95ad6764066af97fa804",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 997,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 34,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerLineSequences.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include \"../utils/type.h\"\n#include \"../utils/flexarray.h\"\n#include \"LaneMarkerLineSequence.h\"\n\nclass LaneMarkerLineSequences\n{\nprivate:\n FlexArray<ptrLaneMarkerLineSequence>\t*_faLaneMarkerLineSequences;\n\npublic:\n LaneMarkerLineSequences(void);\n ~LaneMarkerLineSequences(void);\n LaneMarkerLineSequence *getLaneMarkerLineSequence(int idx);\n void deleteLaneMarkerLineSequence(void);\n void deleteLaneMarkerLineSequence(int iIdx);\n int getLaneMarkerLineSequenceNumber(void);\n void addLaneMarkerLineSequence(LaneMarkerLineSequence *pLaneMarkerLineSequence);\n inline void remove_delete(int iIdx)\n {\n _faLaneMarkerLineSequences->remove_delete(iIdx);\n }\n inline void clear_reset(void)\n {\n _faLaneMarkerLineSequences->clear();\n _faLaneMarkerLineSequences->reset();\n }\n inline void reset(void)\n {\n _faLaneMarkerLineSequences->reset();\n }\n void set(int iIdx, LaneMarkerLineSequence *pLaneMarkerLineSequence);\n};\n"
},
{
"alpha_fraction": 0.627920925617218,
"alphanum_fraction": 0.6375074982643127,
"avg_line_length": 20.397436141967773,
"blob_id": "fd28258bc26047623f3821732b386caab8d5134e",
"content_id": "6383437b943f7015de74a3304a26bee01b260ded",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1787,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 78,
"path": "/athena/examples/LCM/Singlecar/control/apps/message_manger/message_manger.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file message_manger.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef APPS_MESSAGE_MANGER_MESSAGE_MANGER_H_\n#define APPS_MESSAGE_MANGER_MESSAGE_MANGER_H_\n\n#include \"chassis.h\"\n#include \"trajectory.h\"\n#include \"localization.h\"\n#include \"../../common/chassis_detail.h\"\n#include \"../../common/control_cmd.h\"\n#include \"../../common/control_info_report.h\"\n#include \"../../common/emergency.h\"\n#include \"../../control_logic/control_logic.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n /**\n * @class MessageManger\n * @brief 消息管理器.\n */\nclass MessageManger{\n public:\n MessageManger() = default;\n ~MessageManger() = default;\n\n /**\n * @brief 初始化。\n * @param[in] obu_url LCM组播信息.\n * @param[in] control_logic 控制逻辑.\n * @return void.\n */\n virtual void Init(string obu_url,ControlLogic *control_logic) = 0;\n\n\n /**\n * @brief 控制消息上报.\n * @param[in] control_info_report 控制信息.\n * @return void.\n */\n virtual void PublishControlInfoReport(ControlInfoReport control_info_report) = 0;\n\n /**\n * @brief 控制消息发布.\n * @param[in] controller_output 控制输出.\n * @return void.\n */\n virtual void PublishControlCmd(ControlCmd control_cmd) = 0;\n\n /**\n * @brief BCM控制信息发送.\n * @param[in] bcm_control_cmd BCM控制信息.\n * @return void.\n */\n virtual void PublishBcmControlCmd(BcmControlCmd bcm_control_cmd) = 0;\n\n /**\n * @brief 紧急事件消息发送.\n * @param[in] emergency 紧急事件.\n * @return void.\n */\n virtual void PublishEmergencyCmd(Emergency emergency) = 0;\n};\n}\n}\n\n#endif // APPS_MESSAGE_MANGER_MESSAGE_MANGER_H_\n"
},
{
"alpha_fraction": 0.5236051678657532,
"alphanum_fraction": 0.5300429463386536,
"avg_line_length": 12.617647171020508,
"blob_id": "0d6278520d36ea7f0c8e43172b3e1b1f460ed05c",
"content_id": "df8e0871050f6a5bae9ff329c74f0803ed4db57f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 564,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 34,
"path": "/athena/examples/LCM/Singlecar/control/common/Thread.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef COMMUNITCATE_H\n#define COMMUNITCATE_H\n\n\n\n#include <pthread.h>\n\nclass Thread\n{\nprotected:\n pthread_t _tid;\n static void* run0(void* opt);\n void* run1();//如果类中有保存线程状态的变量,可以在这个函数中可以进行更改操作\npublic:\n Thread();\n ~Thread();\n /**\n * 创建线程,线程函数是 run0\n *\n * @return 成功返回 ture 否则返回 false\n */\n bool start();\n /**\n * join this thread\n *\n */\n void join();\n virtual void run(){\n\n }\n };\n\n\n #endif\n\n\n\n"
},
{
"alpha_fraction": 0.6592328548431396,
"alphanum_fraction": 0.6636931300163269,
"avg_line_length": 22.851064682006836,
"blob_id": "7a67de6ee568a106f51c5087a318b6c2834d6361",
"content_id": "84fc93de000fe0c63cbda205ee2fcaf5a78bc891",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1125,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 47,
"path": "/athena/core/arm/Control/include/common/path.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include <vector>\n#include \"navi_point.h\"\n#include \"../controller_config.h\"\n\nusing namespace std;\nusing namespace athena::control;\n\nclass path\n{\npublic:\n void read_navi_file(string path_gps_log_file,const ControllerConfig * controller_config);\n void read_test_file(string path_gps_log_file,const ControllerConfig * controller_config);\n\n void reset_path(const vector<navi_point>& ref_);\n void output_navi_point_all(char *filename,const ControllerConfig * controller_config);\n\n bool IsEmpty() const;\n\npublic:\n int8_t driving_mode_;//模式\n vector<navi_point> ref_points_;\n\n int insert_pos; //current insert pos;\n int current_pos; //current pos of vehicle;\n\n int st_pos; // current motion plan pos\n int en_pos; // end of motion plan pos\n\n path()\n {\n insert_pos = 0;\n current_pos = 0;\n st_pos = 0;\n en_pos = 0;\n }\n\n ~path()\n {\n\n }\n\n};\n\nvoid cau_all_mileage_of_points(vector<navi_point>& points, int start_pos, int end_pos);\nvoid cau_all_mileage_of_lane(path& virtual_lane);\nvoid cau_all_mileage_of_virtual_lane(path& virtual_lane);\n"
},
{
"alpha_fraction": 0.5257210731506348,
"alphanum_fraction": 0.5316681265830994,
"avg_line_length": 24.477272033691406,
"blob_id": "afe47fa8b9828632eccffb7fdcd713143d0602ed",
"content_id": "67611b2996a9ac073dbe7880c5fc3b82046a51ad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4843,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 132,
"path": "/athena/core/x86/Planning/include/spline/quartic_spline.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 四次样条拟合库,\n */\n\n#ifndef QUARTIC_SPLINE_H\n#define QUARTIC_SPLINE_H\n\n#include <vector>\n#include \"common/navi_point.h\"\n\nusing namespace std;\n\n/**\n * @class QuarticSpline\n * @brief 根据两点的状态,通过四次样条拟合出一条平滑的曲线,\n * 并可以内插出曲线内任意一点的坐标,计算该点的方向角和曲率。\n */\nclass QuarticSpline\n{\n public:\n /**\n * @brief 构造函数\n */\n QuarticSpline();\n /**\n * @brief 析构函数\n */\n virtual ~QuarticSpline();\n\n /**\n * @brief 设置起点和终点坐标,\n * @param x0 输入量:起点x坐标。\n * @param y0 输入量:起点y坐标。\n * @param x1 输入量:终点x坐标。\n * @param y1 输入量:终点y坐标。\n */\n int set_points( double x0, double y0, double x1, double y1 );\n\n /**\n * @brief 设置四次样条函数的边界条件,\n * @param dy0 输入量:起点的一阶导。\n * @param ddy0 输入量:起点的二阶导。\n * @param dy1 输入量:终点的一阶导。\n * @param flag 输入量:边界条件类型,等于0时表示以起点的一阶导、二阶导和终点的一阶导为边界条件。\n */\n int set_boundary( double dy0, double ddy0, double dy1, int flag );\n\n /**\n * @brief 求解四次样条函数的系数,\n * @param flag 输入量:边界条件类型,等于0时表示以起点的一阶导、二阶导和终点的一阶导为边界条件。\n */\n int compute_coef( int flag ); //flag : 边界条件类型\n\n /**\n * @brief 给定曲线范围内任一x值,根据四次样条函数求解对应的y值,\n * @param x 输入量:曲线范围内任一x值。\n * @return x对应的y值。\n */\n double operator() ( double x ) const;\n\n /**\n * @brief 给定曲线范围内任一x值,根据四次样条函数求解y的一阶导,\n * @param x 输入量:曲线范围内任一x值。\n * @return y的一阶导。\n */\n double compute_first_deriv( double x );\n\n /**\n * @brief 给定曲线范围内任一x值,根据四次样条函数求解y的二阶导,\n * @param x 输入量:曲线范围内任一x值。\n * @return y的二阶导。\n */\n double compute_second_deriv( double x );\n\n /**\n * @brief 给定曲线范围内任一x值,根据四次样条函数求解该点上切线的方向角,\n * @param x 输入量:曲线范围内任一x值。\n * @return 该点上切线的方向角。\n */\n double compute_heading( double x );\n\n /**\n * @brief 给定曲线范围内任一x值,根据四次样条函数求解该点曲率,\n * @param x 输入量:曲线范围内任一x值。\n * @return 该点曲率。\n */\n double compute_k( double x );\n\n /**\n * @brief 获得四次样条函数的系数,\n * @param coef 输出量:四次样条函数的系数。\n */\n int get_coef( double *coef );\n\n /**\n * @brief 根据两点的状态,通过四次样条拟合出一条平滑的曲线,\n * @param pos0 输入量:起点状态,需要知道其坐标,方向,曲率。\n * @param pos1 输入量:终点状态。需要知道其坐标,方向。\n * @param step 输入量:生成轨迹点的间隔,单位:米。\n */\n int 
gen_trj_from_two_points( navi_point pos0, navi_point pos1, double step );\n\n /**\n * @brief 获取生成的局部坐标系下路径,\n * @param out_path_local 输出量:输出的局部坐标系下路径。\n */\n int get_path_local( vector< navi_point > &out_path_local );\n\n /**\n * @brief 获取生成的全局坐标系下路径,\n * @param out_path_global 输出量:输出的全局坐标系下路径。\n */\n int get_path_global( vector< navi_point > &out_path_global );\n\n protected:\n\n private:\n ///成员变量:四次样条函数的系数。\n double m_a_, m_b_, m_c_, m_d_, m_e_;\n ///成员变量:起点和终点的坐标。\n double m_x_start_, m_y_start_, m_x_end_, m_y_end_;\n ///成员变量:起点和终点的坐标的一阶导、二阶导及三阶导。\n double m_first_deriv_start_, m_first_deriv_end_,\n m_second_deriv_start_, m_second_deriv_end_,\n m_third_deriv_start_, m_third_deriv_end_;\n vector< navi_point > path_local_; ///<四次样条拟合生成的局部坐标系下路径\n vector< navi_point > path_global_; ///<四次样条拟合生成的全局坐标系下路径\n};\n\n#endif // QUARTIC_SPLINE_H\n"
},
{
"alpha_fraction": 0.6377676129341125,
"alphanum_fraction": 0.6444366574287415,
"avg_line_length": 17.14649772644043,
"blob_id": "6feb8e050c8220fd4ad43bd2709a877f9f25db71",
"content_id": "bd33f047f36279af9402720557a5f8f9b3c7eb1c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3267,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 157,
"path": "/athena/core/arm/Control/include/lon_controller/vehicle_dynamics.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file vehicle_dynamics.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROLLER_LON_CONTROLLER_VEHICLE_DYNAMICS_H_\n#define CONTROLLER_LON_CONTROLLER_VEHICLE_DYNAMICS_H_\n\n#include <iostream>\n#include <vector>\n#include \"../pid/pid_controller.h\"\n\nusing namespace std;\n\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class VehicleDynamicsInput\n *\n * @brief VehicleDynamics Input.\n */\nclass VehicleDynamicsInput{\npublic:\n ///目标速度\n double tar_speed_;\n ///当前速度\n double current_speed_;\n ///车重\n double m_;\n ///空气阻力系数\n double cd_;\n ///车轮半径\n double wheel_radius_;\n ///车辆高度\n double h_;\n ///俯仰角\n double current_pitch_;\n ///地面摩擦系数\n double kr_;\n ///当前加速度\n double current_accelerate_;\n ///当前转向角度\n double steer_angle_;\n ///后轴到重心的距离\n double l_b_;\n ///轮距\n double l_;\n ///P值\n double kp_;\n ///I值\n double ki_;\n ///D值\n double kd_;\n};\n\n/**\n * @class VehicleDynamics\n *\n * @brief VehicleDynamics.\n */\nclass VehicleDynamics{\n public:\n VehicleDynamics() = default;\n ~VehicleDynamics() = default;\n\n/**\n * @brief 获取纵向控制车辆动力学输出\n * @param[in] vehicle_dynamics_input_ 输入\n * @return 驱动力值.\n */\n double GetVehicleDynamicsOutput(VehicleDynamicsInput vehicle_dynamics_input);\n\n private:\n const double g = 9.8;\n const double air_resistance_coefficient_ = 0.3;\n const double rolling_resistance_coefficient_ = 0.015;\n\n\n ///纵向控制车辆动力学输入\n VehicleDynamicsInput vehicle_dynamics_input_;\n ///PID控制器\n PIDController pid_controller_;\n\n/**\n * @brief 前馈值计算\n * @return 前馈值.\n */\n double GetFeedforward();\n\n /**\n * @brief 反馈值计算\n * @return 反馈值.\n */\n double GetFeedback();\n\n/**\n * @brief 计算风阻\n * @param[in] cd_ 空气阻力系数\n * @param[in] front_wheel_base_ 前轮轮距\n * @param[in] h_ 车辆高度\n * @param[in] current_speed_ 当前车速\n * @return 风阻.\n */\n double CalculateWindResistance(double cd,double 
front_wheel_base,double h_,double current_speed);\n\n\n/**\n * @brief 计算坡道阻力\n * @param[in] m_ 车重\n * @param[in] current_pitch_ 俯仰角\n * @return 坡阻力.\n */\ndouble CalculateSlopeeResistance(double m,double current_pitch);\n\n/**\n * @brief 计算滚动阻力\n * @param[in] m_ 车重\n * @param[in] kr_ 地面摩擦系数\n * @return 滚动阻力\n */\ndouble CalculateRollResistance(double m,double kr);\n\n\n/**\n * @brief 计算加速阻力。\n * @param[in] m_ 车重。\n * @param[in] current_accelerate_ 当前加速度。\n * @return 加速阻力.\n */\ndouble CalculateAccResistance(double m,double current_accelerate);\n\n/**\n * @brief 转向阻力\n * @param[in] m_ 车重\n * @param[in] steer_angle_ 转向角\n * @param[in] current_speed_ 当前速度\n * @param[in] l_b 后轴到重心的距离\n * @param[in] l 轴距\n * @return 转向阻力.\n */\n//double CalculateTurningResistance(double m,double steer_angle,double current_speed,double l_b,double l);\n\n};\n}\n}\n\n#endif // CONTROLLER_LON_CONTROLLER_VEHICLE_DYNAMICS_H_\n"
},
{
"alpha_fraction": 0.5754386186599731,
"alphanum_fraction": 0.601403534412384,
"avg_line_length": 22.75,
"blob_id": "ac8898d4604f431d0db139792656af374bc05efb",
"content_id": "f3cc6db6e037427dd88a14777575340792df4b33",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1433,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 60,
"path": "/athena/examples/LCM/Singlecar/control/common/control_info_report.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file control_info_report.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef COMMON_CONTROL_INFO_REPORT_H_\n#define COMMON_CONTROL_INFO_REPORT_H_\n\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n/**\n * @class ControlInfoReport\n * @brief 控制反馈.\n */\nclass ControlInfoReport\n{\npublic:\n ControlInfoReport() = default;\n ~ControlInfoReport() =default;\n\n double cur_lon_;\n double cur_lat_;\n double cur_yaw_;\n double cur_brake_;\n double cur_speed_;\n double cur_speed_lateral_;\n double cur_speed_longitudinal_;\n double cur_acceleration_pattern_;\n double cur_acceleration_;\n double cur_acceleration_lateral_;\n double cur_acceleration_longitudinal_;\n double steering_angle_;\n int32_t flashing_status_;\n int32_t cur_gears_;\n int32_t num_of_camera_stat_;\n std::vector< int32_t > camera_stat_;\n int32_t num_of_radar_stat_;\n std::vector< int32_t > radar_stat_;\n int32_t num_of_lidar_stat_;\n std::vector< int32_t > lidar_stat_;\n int32_t num_of_gps_stat_;\n std::vector< int32_t > gps_stat_;\n double gps_time_;\n double mileage_;\n int32_t eps_stat_;\n int32_t epb_stat_;\n int32_t brake_stat_;\n};\n}\n}\n#endif //COMMON_CONTROL_INFO_REPORT_H_\n"
},
{
"alpha_fraction": 0.5154494643211365,
"alphanum_fraction": 0.5568820238113403,
"avg_line_length": 21.58730125427246,
"blob_id": "693aac8da5aff4e42d7fe810305783021916b212",
"content_id": "5eb83af6c6c43bd495b2e168d5434abd3bd2b2dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1708,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 63,
"path": "/athena/examples/LCM/Singlecar/control/common/bcm_control_cmd.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n/**\n * @file bcm_control_cmd.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef COMMON_BCM_CONTROL_CMD_H_\n#define COMMON_BCM_CONTROL_CMD_H_\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n/**\n * @class ChassisDetail\n * @brief 车辆底盘信息.\n */\nclass BcmControlCmd\n{\npublic:\n BcmControlCmd() = default;\n ~BcmControlCmd() = default;\n ///喇叭控制 0 禁声音 1 鸣笛\n int8_t speaker_control_;\n ///远光灯 0 关闭 1 开启\n int8_t high_beam_ctrl_;\n ///近光灯 0 关闭 1 开启\n int8_t low_beam_ctrl_;\n ///左转向灯 0 关闭 1 开启\n int8_t left_turn_ctrl_;\n ///右转向灯 0 关闭 1 开启\n int8_t right_turn_ctrl_;\n ///前雨刮器 0 关闭 1 开启\n\tint8_t front_wiper_ctrl_;\n ///后雨刮器 0 关闭 1 开启\n int8_t rear_wiper_ctrl_;\n ///位置灯 0 关闭 1 开启\n int8_t position_lamp_ctrl_;\n ///前雾灯 0 关闭 1 开启\n int8_t front_fog_lamp_ctrl_;\n ///后雾灯 0 关闭 1 开启\n int8_t rear_fog_lamp_ctrl_;\n ///刹车灯 一般情况自动控制 0 关闭 1 开启\n int8_t brake_lamp_ctrl_;\n ///警报灯 双闪 0 关闭 1 开启\n int8_t alarm_lamp_ctrl_;\n /// 左前门控制 0 关闭 1 开启\n int8_t lf_door_ctrl_;\n /// 右前门控制 0 关闭 1 开启\n int8_t rf_door_ctrl_;\n /// 左后门控制 0 关闭 1 开启\n int8_t lr_door_ctrl_;\n /// 右后门控制 0 关闭 1 开启\n int8_t rr_door_ctrl_;\n};\n}//namespace control\n}//namespace athena\n\n#endif //COMMON_BCM_CONTROL_CMD_H_\n"
},
{
"alpha_fraction": 0.5162696242332458,
"alphanum_fraction": 0.5395119190216064,
"avg_line_length": 22.737930297851562,
"blob_id": "ed73a6741ff7124679e1f37ef0b0bc6697e2c799",
"content_id": "4507d17e0d04a229490bad88e20c7f01f869f2cf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3442,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 145,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerLine.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include \"../utils/type.h\"\n#include \"../utils/config.h\"\n#include \"LaneMarkerPoints.h\"\n\nclass LaneMarkerLine\n{\nprivate:\n int _iOffset;\n int _iYaw;\n int _iVotes;\n\n double _ad3DLine[2][3];\n int _iDirection;\n LaneMarkerPoints *_pLaneMarkerPoints;\n BOOL\t_bOn3DPlaneBoundary;\n double _adAverageHeight[2];\n\npublic:\n inline LaneMarkerLine(void)\t:\n _iOffset(-1),\n _iYaw(-1),\n _iVotes(-1),\n _iDirection(-1)\n {\n for(int iIdx0 = 0; iIdx0 < 2; iIdx0++)\n {\n for(int iIdx1 = 0; iIdx1 < 3; iIdx1++)\n {\n _ad3DLine[iIdx0][iIdx1] = 0.0;\n }\n }\n _pLaneMarkerPoints = NULL;\n _bOn3DPlaneBoundary = FALSE;\n _adAverageHeight[0] = 0;\n _adAverageHeight[1] = 0;\n }\n\n inline LaneMarkerLine(int iOffset, int iYaw, int iVotes)\n {\n _iOffset = iOffset;\n _iYaw = iYaw;\n _iVotes = iVotes;\n _iDirection = -1;\n for(int iIdx0 = 0; iIdx0 < 2; iIdx0++)\n {\n for(int iIdx1 = 0; iIdx1 < 3; iIdx1++)\n {\n _ad3DLine[iIdx0][iIdx1] = 0.0;\n }\n }\n _pLaneMarkerPoints = NULL;\n _bOn3DPlaneBoundary = FALSE;\n _adAverageHeight[0] = 0;\n _adAverageHeight[1] = 0;\n }\n inline LaneMarkerLine(LaneMarkerLine *pSrc)\n {\n if(pSrc == NULL)\treturn;\n _iOffset = pSrc->offset();\n _iYaw = pSrc->yaw();\n _iVotes = pSrc->votes();\n _iDirection = pSrc->direction();\n for(int iIdx0 = 0; iIdx0 < 2; iIdx0++)\n {\n for(int iIdx1 = 0; iIdx1 < 3; iIdx1++)\n {\n _ad3DLine[iIdx0][iIdx1] = (pSrc->get3DLine(iIdx0))[iIdx1];\n }\n }\n if(pSrc->getLaneMarkerPoints() != NULL)\n {\n _pLaneMarkerPoints = new LaneMarkerPoints(pSrc->getLaneMarkerPoints());\n }\n _bOn3DPlaneBoundary = pSrc->getFlagOn3DPlaneBoundary();\n _adAverageHeight[0] = pSrc->getAverageHeight()[0];\n _adAverageHeight[1] = pSrc->getAverageHeight()[1];\n }\n\n inline ~LaneMarkerLine(void)\n {\n SAFE_DELETE(_pLaneMarkerPoints);\n }\n\n inline void offset(int v)\n {\n _iOffset = v;\n }\n inline void yaw(int v)\n {\n _iYaw = v;\n }\n inline void votes(int v)\n {\n _iVotes = v;\n }\n inline int offset(void)\n {\n 
return _iOffset;\n }\n inline int yaw(void)\n {\n return _iYaw;\n }\n inline int votes(void)\n {\n return _iVotes;\n }\n inline void direction(int v)\n {\n _iDirection = v;\n }\n inline int direction(void)\n {\n return _iDirection;\n }\n inline double *get3DLine(int iIdx)\n {\n return _ad3DLine[iIdx];\n }\n inline LaneMarkerPoints *getLaneMarkerPoints(void)\n {\n return _pLaneMarkerPoints;\n }\n inline void setLaneMarkerPoints(LaneMarkerPoints *pLMP)\n {\n _pLaneMarkerPoints = pLMP;\n }\n BOOL calcPosOn3DLine(double *pdX, double *pdY, double dZ);\n BOOL calcAngleOn3DLine(double *pdPitch, double *pdYaw);\n inline BOOL getFlagOn3DPlaneBoundary(void)\n {\n return\t_bOn3DPlaneBoundary;\n }\n inline void setFlagOn3DPlaneBoundary(BOOL bV)\n {\n _bOn3DPlaneBoundary = bV;\n }\n inline double *getAverageHeight(void)\n {\n return _adAverageHeight;\n }\n};\n\ntypedef LaneMarkerLine * ptrLaneMarkerLine;\n"
},
{
"alpha_fraction": 0.47455552220344543,
"alphanum_fraction": 0.4774612784385681,
"avg_line_length": 29.06321907043457,
"blob_id": "5e60ad15937c6c309c4dc7d3885409bbff1f0daa",
"content_id": "236fccce1bdccdf45e5e069ff1de6e5b5d4b8361",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 30347,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 870,
"path": "/athena/core/x86/Common/include/base/db/nad_db.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_db.h\n * 创建者:代码生成工具\n * 时 间:2016-12-20\n * 描 述:数据库的头文件\n-------------------------------------------------------*/\n#ifndef __NAD_DB_H__\n#define __NAD_DB_H__\n\n\n//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n// 自动生成的代码,请勿修改\n//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n\n//引用base头文件\n#include \"nad_base.h\"\n\n\n/*-------------------------------------------------------\n * 初始化数据库的API函数\n-------------------------------------------------------*/\n\n//从db.xml(动态数据)加载数据,并保存到db中(std::map格式的内存数据库)\nint nad_load_xml_to_db();\n\n//从db_type.xml(静态数据)加载数据,并保存到db中(std::map格式的内存数据库)\nint nad_load_type_xml_to_db();\n\n\n/*-------------------------------------------------------\n * scene_type: 场景类型\n-------------------------------------------------------*/\n\n//scene_type表记录\nclass nad_record_scene_type\n{\npublic:\n string scene_type; //场景类型名称\n string scene_desc; //场景类型描述\n double avg_speed; //平均车速(km/h)\n double avg_lane_width; //平均道路宽度(cm)\n\n //赋值/打印调试信息\n nad_record_scene_type & operator = (const nad_record_scene_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_scene_type &rec)\n {\n cout << \"scene_type: scene_type=\\\"\" << rec.scene_type //场景类型名称\n << \"\\\", scene_desc=\\\"\" << rec.scene_desc //场景类型描述\n << \"\\\", avg_speed=\\\"\" << rec.avg_speed //平均车速(km/h)\n << \"\\\", avg_lane_width=\\\"\" << rec.avg_lane_width //平均道路宽度(cm)\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询scene_type表,失败返回NULL\nnad_record_scene_type *db_query_scene_type(string scene_type);\n\n//查询scene_type表的所有记录,不会失败\nmap<string, nad_record_scene_type> &db_query_all_scene_type();\n\n\n/*-------------------------------------------------------\n * scene: 当前场景\n-------------------------------------------------------*/\n\n//scene表记录\nclass nad_record_scene\n{\npublic:\n string scene_name; //场景名称\n string scene_type; //场景类型\n\n //赋值/打印调试信息\n nad_record_scene & 
operator = (const nad_record_scene &rec);\n friend inline ostream & operator << (ostream & os, nad_record_scene &rec)\n {\n cout << \"scene: scene_name=\\\"\" << rec.scene_name //场景名称\n << \"\\\", scene_type=\\\"\" << rec.scene_type //场景类型\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询scene表,失败返回NULL\nnad_record_scene *db_query_scene(string scene_name);\n\n//查询scene表的所有记录,不会失败\nmap<string, nad_record_scene> &db_query_all_scene();\n\n\n/*-------------------------------------------------------\n * csu_type: csu类型\n-------------------------------------------------------*/\n\n//csu_type表记录\nclass nad_record_csu_type\n{\npublic:\n string csu_type; //csu类型名称\n string csu_desc; //csu类型描述\n\n //赋值/打印调试信息\n nad_record_csu_type & operator = (const nad_record_csu_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_csu_type &rec)\n {\n cout << \"csu_type: csu_type=\\\"\" << rec.csu_type //csu类型名称\n << \"\\\", csu_desc=\\\"\" << rec.csu_desc //csu类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询csu_type表,失败返回NULL\nnad_record_csu_type *db_query_csu_type(string csu_type);\n\n//查询csu_type表的所有记录,不会失败\nmap<string, nad_record_csu_type> &db_query_all_csu_type();\n\n\n/*-------------------------------------------------------\n * csu: csu\n-------------------------------------------------------*/\n\n//csu表记录\nclass nad_record_csu\n{\npublic:\n string csu_name; //csu名称\n string csu_type; //csu类型\n\n //赋值/打印调试信息\n nad_record_csu & operator = (const nad_record_csu &rec);\n friend inline ostream & operator << (ostream & os, nad_record_csu &rec)\n {\n cout << \"csu: csu_name=\\\"\" << rec.csu_name //csu名称\n << \"\\\", csu_type=\\\"\" << rec.csu_type //csu类型\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询csu表,失败返回NULL\nnad_record_csu *db_query_csu(string csu_name);\n\n//查询csu表的所有记录,不会失败\nmap<string, nad_record_csu> &db_query_all_csu();\n\n\n/*-------------------------------------------------------\n * csu_user: 
csu用户(用于OCT登录)\n-------------------------------------------------------*/\n\n//csu_user表记录\nclass nad_record_csu_user\n{\npublic:\n string csu_user; //csu登录用户名\n string csu_password; //csu登录密码\n\n //赋值/打印调试信息\n nad_record_csu_user & operator = (const nad_record_csu_user &rec);\n friend inline ostream & operator << (ostream & os, nad_record_csu_user &rec)\n {\n cout << \"csu_user: csu_user=\\\"\" << rec.csu_user //csu登录用户名\n << \"\\\", csu_password=\\\"\" << rec.csu_password //csu登录密码\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询csu_user表,失败返回NULL\nnad_record_csu_user *db_query_csu_user(string csu_user);\n\n//查询csu_user表的所有记录,不会失败\nmap<string, nad_record_csu_user> &db_query_all_csu_user();\n\n\n/*-------------------------------------------------------\n * rsu_type: rsu类型\n-------------------------------------------------------*/\n\n//rsu_type表记录\nclass nad_record_rsu_type\n{\npublic:\n string rsu_type; //rsu类型名称\n string rsu_desc; //rsu类型描述\n\n //赋值/打印调试信息\n nad_record_rsu_type & operator = (const nad_record_rsu_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_rsu_type &rec)\n {\n cout << \"rsu_type: rsu_type=\\\"\" << rec.rsu_type //rsu类型名称\n << \"\\\", rsu_desc=\\\"\" << rec.rsu_desc //rsu类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询rsu_type表,失败返回NULL\nnad_record_rsu_type *db_query_rsu_type(string rsu_type);\n\n//查询rsu_type表的所有记录,不会失败\nmap<string, nad_record_rsu_type> &db_query_all_rsu_type();\n\n\n/*-------------------------------------------------------\n * rsu: rsu\n-------------------------------------------------------*/\n\n//rsu表记录\nclass nad_record_rsu\n{\npublic:\n string rsu_name; //rsu名称\n string rsu_type; //rsu类型\n double rsu_lon; //rsu位置(经度)\n double rsu_lat; //rsu位置(纬度)\n double rsu_radius_lon; //rsu覆盖半径(经度,米)\n double rsu_radius_lat; //rsu覆盖半径(纬度,米)\n\n //赋值/打印调试信息\n nad_record_rsu & operator = (const nad_record_rsu &rec);\n friend inline ostream & operator << (ostream & os, nad_record_rsu &rec)\n 
{\n cout << \"rsu: rsu_name=\\\"\" << rec.rsu_name //rsu名称\n << \"\\\", rsu_type=\\\"\" << rec.rsu_type //rsu类型\n << \"\\\", rsu_lon=\\\"\" << rec.rsu_lon //rsu位置(经度)\n << \"\\\", rsu_lat=\\\"\" << rec.rsu_lat //rsu位置(纬度)\n << \"\\\", rsu_radius_lon=\\\"\" << rec.rsu_radius_lon //rsu覆盖半径(经度,米)\n << \"\\\", rsu_radius_lat=\\\"\" << rec.rsu_radius_lat //rsu覆盖半径(纬度,米)\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询rsu表,失败返回NULL\nnad_record_rsu *db_query_rsu(string rsu_name);\n\n//查询rsu表的所有记录,不会失败\nmap<string, nad_record_rsu> &db_query_all_rsu();\n\n\n/*-------------------------------------------------------\n * obu_type: obu类型\n-------------------------------------------------------*/\n\n//obu_type表记录\nclass nad_record_obu_type\n{\npublic:\n string obu_type; //obu类型名称\n string obu_desc; //obu类型描述\n\n //赋值/打印调试信息\n nad_record_obu_type & operator = (const nad_record_obu_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_obu_type &rec)\n {\n cout << \"obu_type: obu_type=\\\"\" << rec.obu_type //obu类型名称\n << \"\\\", obu_desc=\\\"\" << rec.obu_desc //obu类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询obu_type表,失败返回NULL\nnad_record_obu_type *db_query_obu_type(string obu_type);\n\n//查询obu_type表的所有记录,不会失败\nmap<string, nad_record_obu_type> &db_query_all_obu_type();\n\n\n/*-------------------------------------------------------\n * obu: obu\n-------------------------------------------------------*/\n\n//obu表记录\nclass nad_record_obu\n{\npublic:\n string obu_name; //obu名称\n string obu_type; //obu类型\n\n //赋值/打印调试信息\n nad_record_obu & operator = (const nad_record_obu &rec);\n friend inline ostream & operator << (ostream & os, nad_record_obu &rec)\n {\n cout << \"obu: obu_name=\\\"\" << rec.obu_name //obu名称\n << \"\\\", obu_type=\\\"\" << rec.obu_type //obu类型\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询obu表,失败返回NULL\nnad_record_obu *db_query_obu(string obu_name);\n\n//查询obu表的所有记录,不会失败\nmap<string, nad_record_obu> 
&db_query_all_obu();\n\n\n/*-------------------------------------------------------\n * oct_type: oct类型\n-------------------------------------------------------*/\n\n//oct_type表记录\nclass nad_record_oct_type\n{\npublic:\n string oct_type; //oct类型名称\n string oct_desc; //oct类型描述\n\n //赋值/打印调试信息\n nad_record_oct_type & operator = (const nad_record_oct_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_oct_type &rec)\n {\n cout << \"oct_type: oct_type=\\\"\" << rec.oct_type //oct类型名称\n << \"\\\", oct_desc=\\\"\" << rec.oct_desc //oct类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询oct_type表,失败返回NULL\nnad_record_oct_type *db_query_oct_type(string oct_type);\n\n//查询oct_type表的所有记录,不会失败\nmap<string, nad_record_oct_type> &db_query_all_oct_type();\n\n\n/*-------------------------------------------------------\n * oct: oct\n-------------------------------------------------------*/\n\n//oct表记录\nclass nad_record_oct\n{\npublic:\n string oct_name; //oct名称\n string oct_type; //oct类型\n\n //赋值/打印调试信息\n nad_record_oct & operator = (const nad_record_oct &rec);\n friend inline ostream & operator << (ostream & os, nad_record_oct &rec)\n {\n cout << \"oct: oct_name=\\\"\" << rec.oct_name //oct名称\n << \"\\\", oct_type=\\\"\" << rec.oct_type //oct类型\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询oct表,失败返回NULL\nnad_record_oct *db_query_oct(string oct_name);\n\n//查询oct表的所有记录,不会失败\nmap<string, nad_record_oct> &db_query_all_oct();\n\n\n/*-------------------------------------------------------\n * rsd_type: 路侧采集器类型\n-------------------------------------------------------*/\n\n//rsd_type表记录\nclass nad_record_rsd_type\n{\npublic:\n string rsd_type; //rsd类型名称\n string rsd_desc; //rsd类型描述\n\n //赋值/打印调试信息\n nad_record_rsd_type & operator = (const nad_record_rsd_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_rsd_type &rec)\n {\n cout << \"rsd_type: rsd_type=\\\"\" << rec.rsd_type //rsd类型名称\n << \"\\\", rsd_desc=\\\"\" << 
rec.rsd_desc //rsd类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询rsd_type表,失败返回NULL\nnad_record_rsd_type *db_query_rsd_type(string rsd_type);\n\n//查询rsd_type表的所有记录,不会失败\nmap<string, nad_record_rsd_type> &db_query_all_rsd_type();\n\n\n/*-------------------------------------------------------\n * rsd: 路侧采集器名称\n-------------------------------------------------------*/\n\n//rsd表记录\nclass nad_record_rsd\n{\npublic:\n string rsd_name; //rsd名称\n string rsd_type; //rsd类型\n string rsd_ip; //rsd的IP\n\n //赋值/打印调试信息\n nad_record_rsd & operator = (const nad_record_rsd &rec);\n friend inline ostream & operator << (ostream & os, nad_record_rsd &rec)\n {\n cout << \"rsd: rsd_name=\\\"\" << rec.rsd_name //rsd名称\n << \"\\\", rsd_type=\\\"\" << rec.rsd_type //rsd类型\n << \"\\\", rsd_ip=\\\"\" << rec.rsd_ip //rsd的IP\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询rsd表,失败返回NULL\nnad_record_rsd *db_query_rsd(string rsd_name);\n\n//查询rsd表的所有记录,不会失败\nmap<string, nad_record_rsd> &db_query_all_rsd();\n\n\n/*-------------------------------------------------------\n * rsd_sensor: RSD的传感器\n-------------------------------------------------------*/\n\n//rsd_sensor表记录\nclass nad_record_rsd_sensor\n{\npublic:\n string rsd_sensor_name; //rsd传感器名称\n int type; //rsd传感器类型\n double lon; //安装位置的经度\n double lat; //安装位置的纬度\n double x; //安装位置的x坐标\n double y; //安装位置的y坐标\n double yaw; //头指向(相对正北的夹角)\n int range; //有效扫描范围\n int vertical_fov; //垂直视场角\n int horizontal_fov; //水平视场角\n string ip; //rsd传感器的IP\n int64 lane_id; //管理道路的高速道ID\n\n //赋值/打印调试信息\n nad_record_rsd_sensor & operator = (const nad_record_rsd_sensor &rec);\n friend inline ostream & operator << (ostream & os, nad_record_rsd_sensor &rec)\n {\n cout << \"rsd_sensor: rsd_sensor_name=\\\"\" << rec.rsd_sensor_name //rsd传感器名称\n << \"\\\", type=\\\"\" << rec.type //rsd传感器类型\n << \"\\\", lon=\\\"\" << rec.lon //安装位置的经度\n << \"\\\", lat=\\\"\" << rec.lat //安装位置的纬度\n << \"\\\", x=\\\"\" << rec.x //安装位置的x坐标\n << \"\\\", y=\\\"\" << rec.y 
//安装位置的y坐标\n << \"\\\", yaw=\\\"\" << rec.yaw //头指向(相对正北的夹角)\n << \"\\\", range=\\\"\" << rec.range //有效扫描范围\n << \"\\\", vertical_fov=\\\"\" << rec.vertical_fov //垂直视场角\n << \"\\\", horizontal_fov=\\\"\" << rec.horizontal_fov //水平视场角\n << \"\\\", ip=\\\"\" << rec.ip //rsd传感器的IP\n << \"\\\", lane_id=\\\"\" << rec.lane_id //管理道路的高速道ID\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询rsd_sensor表,失败返回NULL\nnad_record_rsd_sensor *db_query_rsd_sensor(string rsd_sensor_name);\n\n//查询rsd_sensor表的所有记录,不会失败\nmap<string, nad_record_rsd_sensor> &db_query_all_rsd_sensor();\n\n\n/*-------------------------------------------------------\n * light_type: 红绿灯类型\n-------------------------------------------------------*/\n\n//light_type表记录\nclass nad_record_light_type\n{\npublic:\n string light_type; //红绿灯类型名称\n string light_desc; //红绿灯类型描述\n\n //赋值/打印调试信息\n nad_record_light_type & operator = (const nad_record_light_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_light_type &rec)\n {\n cout << \"light_type: light_type=\\\"\" << rec.light_type //红绿灯类型名称\n << \"\\\", light_desc=\\\"\" << rec.light_desc //红绿灯类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询light_type表,失败返回NULL\nnad_record_light_type *db_query_light_type(string light_type);\n\n//查询light_type表的所有记录,不会失败\nmap<string, nad_record_light_type> &db_query_all_light_type();\n\n\n/*-------------------------------------------------------\n * light: 红绿灯\n-------------------------------------------------------*/\n\n//light表记录\nclass nad_record_light\n{\npublic:\n string light_id; //红绿灯ID(经度纬度)\n string light_type; //红绿灯类型\n uint light_status; //红绿灯状态\n string light_ip; //红绿灯的IP\n int64 stop_lane_id; //管理道路的高速道ID\n\n //赋值/打印调试信息\n nad_record_light & operator = (const nad_record_light &rec);\n friend inline ostream & operator << (ostream & os, nad_record_light &rec)\n {\n cout << \"light: light_id=\\\"\" << rec.light_id //红绿灯ID(经度纬度)\n << \"\\\", light_type=\\\"\" << rec.light_type //红绿灯类型\n 
<< \"\\\", light_status=\\\"\" << rec.light_status //红绿灯状态\n << \"\\\", light_ip=\\\"\" << rec.light_ip //红绿灯的IP\n << \"\\\", stop_lane_id=\\\"\" << rec.stop_lane_id //管理道路的高速道ID\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询light表,失败返回NULL\nnad_record_light *db_query_light(string light_id);\n\n//查询light表的所有记录,不会失败\nmap<string, nad_record_light> &db_query_all_light();\n\n\n/*-------------------------------------------------------\n * limspeed_type: 限速牌类型\n-------------------------------------------------------*/\n\n//limspeed_type表记录\nclass nad_record_limspeed_type\n{\npublic:\n string limspeed_type; //限速牌类型名称\n string limspeed_desc; //限速牌类型描述\n\n //赋值/打印调试信息\n nad_record_limspeed_type & operator = (const nad_record_limspeed_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_limspeed_type &rec)\n {\n cout << \"limspeed_type: limspeed_type=\\\"\" << rec.limspeed_type //限速牌类型名称\n << \"\\\", limspeed_desc=\\\"\" << rec.limspeed_desc //限速牌类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询limspeed_type表,失败返回NULL\nnad_record_limspeed_type *db_query_limspeed_type(string limspeed_type);\n\n//查询limspeed_type表的所有记录,不会失败\nmap<string, nad_record_limspeed_type> &db_query_all_limspeed_type();\n\n\n/*-------------------------------------------------------\n * limspeed: 限速牌\n-------------------------------------------------------*/\n\n//limspeed表记录\nclass nad_record_limspeed\n{\npublic:\n string limspeed_id; //限速牌ID(经度纬度)\n string limspeed_type; //限速牌类型\n uint limspeed_value; //限速牌当前取值(km/h)\n string limspeed_ip; //限速牌的IP\n int64 lane_id; //管理道路的高速道ID\n\n //赋值/打印调试信息\n nad_record_limspeed & operator = (const nad_record_limspeed &rec);\n friend inline ostream & operator << (ostream & os, nad_record_limspeed &rec)\n {\n cout << \"limspeed: limspeed_id=\\\"\" << rec.limspeed_id //限速牌ID(经度纬度)\n << \"\\\", limspeed_type=\\\"\" << rec.limspeed_type //限速牌类型\n << \"\\\", limspeed_value=\\\"\" << rec.limspeed_value //限速牌当前取值(km/h)\n << \"\\\", 
limspeed_ip=\\\"\" << rec.limspeed_ip //限速牌的IP\n << \"\\\", lane_id=\\\"\" << rec.lane_id //管理道路的高速道ID\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询limspeed表,失败返回NULL\nnad_record_limspeed *db_query_limspeed(string limspeed_id);\n\n//查询limspeed表的所有记录,不会失败\nmap<string, nad_record_limspeed> &db_query_all_limspeed();\n\n\n/*-------------------------------------------------------\n * block_type: 施工标志类型\n-------------------------------------------------------*/\n\n//block_type表记录\nclass nad_record_block_type\n{\npublic:\n string block_type; //施工标志类型名称\n string block_desc; //施工标志类型描述\n\n //赋值/打印调试信息\n nad_record_block_type & operator = (const nad_record_block_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_block_type &rec)\n {\n cout << \"block_type: block_type=\\\"\" << rec.block_type //施工标志类型名称\n << \"\\\", block_desc=\\\"\" << rec.block_desc //施工标志类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询block_type表,失败返回NULL\nnad_record_block_type *db_query_block_type(string block_type);\n\n//查询block_type表的所有记录,不会失败\nmap<string, nad_record_block_type> &db_query_all_block_type();\n\n\n/*-------------------------------------------------------\n * block: 施工标志\n-------------------------------------------------------*/\n\n//block表记录\nclass nad_record_block\n{\npublic:\n string block_id; //施工标志ID(经度纬度)\n string block_type; //施工标志类型\n uint block_value; //施工标志当前取值\n uint lane_count; //管理的车道数\n string block_ip; //施工标志的IP\n int64 lane_id; //管理道路的高速道ID\n\n //赋值/打印调试信息\n nad_record_block & operator = (const nad_record_block &rec);\n friend inline ostream & operator << (ostream & os, nad_record_block &rec)\n {\n cout << \"block: block_id=\\\"\" << rec.block_id //施工标志ID(经度纬度)\n << \"\\\", block_type=\\\"\" << rec.block_type //施工标志类型\n << \"\\\", block_value=\\\"\" << rec.block_value //施工标志当前取值\n << \"\\\", lane_count=\\\"\" << rec.lane_count //管理的车道数\n << \"\\\", block_ip=\\\"\" << rec.block_ip //施工标志的IP\n << \"\\\", lane_id=\\\"\" << 
rec.lane_id //管理道路的高速道ID\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询block表,失败返回NULL\nnad_record_block *db_query_block(string block_id);\n\n//查询block表的所有记录,不会失败\nmap<string, nad_record_block> &db_query_all_block();\n\n\n/*-------------------------------------------------------\n * ts_3rd_type: 第三方交通系统类型\n-------------------------------------------------------*/\n\n//ts_3rd_type表记录\nclass nad_record_ts_3rd_type\n{\npublic:\n string ts_3rd_type; //第三方交通系统类型名称\n string ts_3rd_desc; //第三方交通系统类型描述\n\n //赋值/打印调试信息\n nad_record_ts_3rd_type & operator = (const nad_record_ts_3rd_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_ts_3rd_type &rec)\n {\n cout << \"ts_3rd_type: ts_3rd_type=\\\"\" << rec.ts_3rd_type //第三方交通系统类型名称\n << \"\\\", ts_3rd_desc=\\\"\" << rec.ts_3rd_desc //第三方交通系统类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询ts_3rd_type表,失败返回NULL\nnad_record_ts_3rd_type *db_query_ts_3rd_type(string ts_3rd_type);\n\n//查询ts_3rd_type表的所有记录,不会失败\nmap<string, nad_record_ts_3rd_type> &db_query_all_ts_3rd_type();\n\n\n/*-------------------------------------------------------\n * ts_3rd: 第三方交通系统\n-------------------------------------------------------*/\n\n//ts_3rd表记录\nclass nad_record_ts_3rd\n{\npublic:\n string ts_3rd_name; //第三方交通系统名称\n string ts_3rd_type; //第三方交通系统类型\n string ts_3rd_ip; //第三方交通系统ip\n int ts_3rd_port; //第三方交通系统端口\n string ts_3rd_user; //第三方交通系统用户名\n string ts_3rd_password; //第三方交通系统密码\n\n //赋值/打印调试信息\n nad_record_ts_3rd & operator = (const nad_record_ts_3rd &rec);\n friend inline ostream & operator << (ostream & os, nad_record_ts_3rd &rec)\n {\n cout << \"ts_3rd: ts_3rd_name=\\\"\" << rec.ts_3rd_name //第三方交通系统名称\n << \"\\\", ts_3rd_type=\\\"\" << rec.ts_3rd_type //第三方交通系统类型\n << \"\\\", ts_3rd_ip=\\\"\" << rec.ts_3rd_ip //第三方交通系统ip\n << \"\\\", ts_3rd_port=\\\"\" << rec.ts_3rd_port //第三方交通系统端口\n << \"\\\", ts_3rd_user=\\\"\" << rec.ts_3rd_user //第三方交通系统用户名\n << \"\\\", ts_3rd_password=\\\"\" << 
rec.ts_3rd_password //第三方交通系统密码\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询ts_3rd表,失败返回NULL\nnad_record_ts_3rd *db_query_ts_3rd(string ts_3rd_name);\n\n//查询ts_3rd表的所有记录,不会失败\nmap<string, nad_record_ts_3rd> &db_query_all_ts_3rd();\n\n\n/*-------------------------------------------------------\n * crossing_type: 路口类型\n-------------------------------------------------------*/\n\n//crossing_type表记录\nclass nad_record_crossing_type\n{\npublic:\n string crossing_type; //路口类型名称\n string crossing_desc; //路口类型描述\n\n //赋值/打印调试信息\n nad_record_crossing_type & operator = (const nad_record_crossing_type &rec);\n friend inline ostream & operator << (ostream & os, nad_record_crossing_type &rec)\n {\n cout << \"crossing_type: crossing_type=\\\"\" << rec.crossing_type //路口类型名称\n << \"\\\", crossing_desc=\\\"\" << rec.crossing_desc //路口类型描述\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询crossing_type表,失败返回NULL\nnad_record_crossing_type *db_query_crossing_type(string crossing_type);\n\n//查询crossing_type表的所有记录,不会失败\nmap<string, nad_record_crossing_type> &db_query_all_crossing_type();\n\n\n/*-------------------------------------------------------\n * crossing: 路口\n-------------------------------------------------------*/\n\n//crossing表记录\nclass nad_record_crossing\n{\npublic:\n string crossing_id; //路口ID\n string crossing_type; //路口类型\n\n //赋值/打印调试信息\n nad_record_crossing & operator = (const nad_record_crossing &rec);\n friend inline ostream & operator << (ostream & os, nad_record_crossing &rec)\n {\n cout << \"crossing: crossing_id=\\\"\" << rec.crossing_id //路口ID\n << \"\\\", crossing_type=\\\"\" << rec.crossing_type //路口类型\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询crossing表,失败返回NULL\nnad_record_crossing *db_query_crossing(string crossing_id);\n\n//查询crossing表的所有记录,不会失败\nmap<string, nad_record_crossing> &db_query_all_crossing();\n\n\n/*-------------------------------------------------------\n * conflict: 
冲突表\n-------------------------------------------------------*/\n\n//conflict表记录\nclass nad_record_conflict\n{\npublic:\n string conflict_id; //冲突表项ID\n string light_id; //对应的红绿灯\n int64 active_point; //激活点\n int64 stop_point; //停止点\n int64 exit_point; //离开点\n string conflict_list; //冲突表\n\n //赋值/打印调试信息\n nad_record_conflict & operator = (const nad_record_conflict &rec);\n friend inline ostream & operator << (ostream & os, nad_record_conflict &rec)\n {\n cout << \"conflict: conflict_id=\\\"\" << rec.conflict_id //冲突表项ID\n << \"\\\", light_id=\\\"\" << rec.light_id //对应的红绿灯\n << \"\\\", active_point=\\\"\" << rec.active_point //激活点\n << \"\\\", stop_point=\\\"\" << rec.stop_point //停止点\n << \"\\\", exit_point=\\\"\" << rec.exit_point //离开点\n << \"\\\", conflict_list=\\\"\" << rec.conflict_list //冲突表\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询conflict表,失败返回NULL\nnad_record_conflict *db_query_conflict(string conflict_id);\n\n//查询conflict表的所有记录,不会失败\nmap<string, nad_record_conflict> &db_query_all_conflict();\n\n\n/*-------------------------------------------------------\n * warning: 道路预警\n-------------------------------------------------------*/\n\n//warning表记录\nclass nad_record_warning\n{\npublic:\n string warning_id; //预警ID\n string warning_type; //预警类型\n string warning_desc; //预警描述\n int64 lane_id; //管理道路的高速道ID\n\n //赋值/打印调试信息\n nad_record_warning & operator = (const nad_record_warning &rec);\n friend inline ostream & operator << (ostream & os, nad_record_warning &rec)\n {\n cout << \"warning: warning_id=\\\"\" << rec.warning_id //预警ID\n << \"\\\", warning_type=\\\"\" << rec.warning_type //预警类型\n << \"\\\", warning_desc=\\\"\" << rec.warning_desc //预警描述\n << \"\\\", lane_id=\\\"\" << rec.lane_id //管理道路的高速道ID\n << \"\\\"\" << endl;\n return os;\n }\n};\n\n//使用主键查询warning表,失败返回NULL\nnad_record_warning *db_query_warning(string warning_id);\n\n//查询warning表的所有记录,不会失败\nmap<string, nad_record_warning> 
&db_query_all_warning();\n\n\n/*-------------------------------------------------------\n * 初始化数据库和退出数据库\n-------------------------------------------------------*/\n\n//初始化数据库\nint nad_db_init();\n\n//退出数据库\nvoid nad_db_free();\n\n\n#endif /*__NAD_DB_H__*/\n"
},
{
"alpha_fraction": 0.5913978219032288,
"alphanum_fraction": 0.6129032373428345,
"avg_line_length": 18.241378784179688,
"blob_id": "69d40d4263ae6afb0fa2a16f5160160cc8cb5baf",
"content_id": "74e2307f1c7ec54e49c907452c15a64673d94b81",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 558,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 29,
"path": "/athena/examples/LCM/Singlecar/launch_truck/bin/ds.sh",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n./dd.sh\nif [ -x *.osm ];then\n rm ./*.osm\nfi\nif [ -x *.log ];then\n rm ./*.log\nfi\nif [ -s ../log/ ];then\n rm ../log/*\nfi\nif [ -x ./1/LonTestData.txt ];then\n rm ./1/LonTestData.txt\nfi\nif [ -x ./2/LonTestData.txt ];then\n rm ./2/LonTestData.txt\nfi\nif [ -x ./3/LonTestData.txt ];then\n rm ./3/LonTestData.txt\nfi\nif [ -x ./1/lateral_control_log.txt ];then\n rm ./1/lateral_control_log.txt\nfi\nif [ -x ./2/lateral_control_log.txt ];then\n rm ./2/lateral_control_log.txt\nfi\nif [ -x ./3/lateral_control_log.txt ];then\n rm ./3/lateral_control_log.txt\nfi\n"
},
{
"alpha_fraction": 0.5143449902534485,
"alphanum_fraction": 0.5205668807029724,
"avg_line_length": 29.13541603088379,
"blob_id": "020a8c568c3f95d7305d74325ea7f75739dfef2f",
"content_id": "99c36892957d9e5f3ea42b191770b6df0cabfb68",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3159,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 96,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/BaseDefine.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#ifndef\t_BASE_DEFINE_H_\n#define\t_BASE_DEFINE_H_\n\n#include <vector>\n//#include <tchar.h>\n\n#define CAMERACAN_WINDOWNAME _T(\"CameraCAN\")\n#define WM_MESSAGE_SIGNINFO WM_USER+10000\t\t// Sign 信息\n#define WM_MESSAGE_CAMERAINFO WM_USER+10001 // Camera 信息\n////////////////////////////////////////////////////////// 高精度导航 ////////////////////////////////////////////////////////////////////////////\n\n// Camera 信息\ntypedef struct _stCameraInfo\n{\n\tlong CameraCaptureTimestamp; // Camera采集时间戳\n\tlong CameraResultTimestamp; // Camera处理结果时间戳\n\tint LeftLane; // 左边车道数量\n\tint RightLane; // 右边车道数量\n\tint LDW; // 车线脱逸\n\tint ChangeLaneEvent; // 变道事件\n\tint LeftLineType; // 左边车线类型\n\tint RightLineType; // 右边车线类型\n\tint LeftLineWidth; // 左边车线线型宽度\n\tint RightLineWidth; // 右边车线线型宽度\n\tshort LeftLineDistance; // 到左边车线的垂直距离 (单位:mm)\n\tshort RightLineDistance; // 到右边车线的垂直距离 (单位:mm)\n\tshort SteeringAngle; // 航向角 (单位:角度)\n\tshort LateralDeviation; // 横向偏移\n\tshort SignType; // traffic sign 类型\n\tshort SignPositionX; // traffic sign Position X\n\tshort SignPositionY; // traffic sign Position Y\n\tshort SignPositionZ; // traffic sign Position Z\n\n\t_stCameraInfo()\n\t{\n\t\tCameraCaptureTimestamp = CameraResultTimestamp = 0;\n\t\tLeftLane = RightLane = LDW = ChangeLaneEvent = LeftLineType = RightLineType = LeftLineWidth = RightLineWidth = 0;\n\t\tLeftLineDistance = RightLineDistance = SteeringAngle = LateralDeviation = 0;\n\t\tSignType = SignPositionX = SignPositionY = SignPositionZ = 0;\n\t\t//Reserve = 0;\n\t}\n};\n\n//Line 信息\nstruct _LineInfo\n{\n int typeLR;\n int bottom_x;\n bool operator < (const _LineInfo& rhs ) const //升序排序时必须写的函数\n {\n return bottom_x < rhs.bottom_x;\n }\n};\n//********************************************************************************************************************************************//\n\nstruct point\n{\n int x,y;\n int road_x,road_y;\n point()\n {\n x=y=road_x=road_y=0;\n }\n};\n\nstruct _CurrentLine\n{\n point 
left_up,right_down,left_down,right_up;\n short SteeringAngle;\n _CurrentLine()\n {\n SteeringAngle=0;\n }\n\n};\n\nstruct Boundrary_Distance\n{\n short LeftLineDistance; // 到左边车线的垂直距离 (单位:mm)\n short RightLineDistance;\n short Distance;\n short Left_Dis_Change;\n short Right_Dis_Change;\n short Distance_Change;\n Boundrary_Distance()\n {\n LeftLineDistance=RightLineDistance=Distance=Left_Dis_Change=Right_Dis_Change=Distance_Change=0;\n }\n};\n\nstruct _Multiline\n{\n point left,right;\n};\n\n#endif\n"
},
{
"alpha_fraction": 0.5370762944221497,
"alphanum_fraction": 0.5646186470985413,
"avg_line_length": 14.22580623626709,
"blob_id": "2e6e9f487ce021be0a76f6b7ccb75a1384e001ba",
"content_id": "f5727aa0759d62e2b82a10b5eff8961d89b5a0f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 978,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 62,
"path": "/athena/core/x86/Control/include/trajectory.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file trajectory.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef TRAJECTORY_H_\n#define TRAJECTORY_H_\n\n#include <iostream>\n#include <vector>\n#include \"nav_points.h\"\n\nusing namespace std;\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class Trajectory\n *\n * @brief motion Trajectory.\n */\nclass Trajectory\n{\n public:\n Trajectory()\n {\n gps_time_ = 0.0;\n num_of_points_ = 0;\n type_ = 0;\n reserved_ = 0;\n points_.clear();\n car_action_ = 0;\n driving_mode_ = 0;\n }\n ~Trajectory() = default;\n\n ///GPS时间\n double gps_time_;\n ///轨迹点数量\n int32_t num_of_points_;\n ///轨迹点类型\n int32_t type_;\n ///保留\n double reserved_;\n ///轨迹点\n std::vector<NavPoints> points_;\n int32_t car_action_;\n int32_t driving_mode_;\n};\n\n}\n}\n\n#endif // TRAJECTORY_H_\n"
},
{
"alpha_fraction": 0.5551232099533081,
"alphanum_fraction": 0.5551232099533081,
"avg_line_length": 20.41666603088379,
"blob_id": "d3398a68b0483473036077e15031cb8e09ca2094",
"content_id": "8e6448d209545907f7c40d6c01ccc613a662f8fa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 809,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 36,
"path": "/athena/core/x86/Control/include/common/map_matching/map_matching.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#pragma once\n\n#include <math.h>\n\n#include \"cs.h\"\n#include \"../path.h\"\n\nclass map_matching\n{\npublic:\n // 最近的匹配点中的相关信息。\n int last_match_point_no;\n int next_match_point_no;\n\npublic:\n int current_match_point_no;\n\npublic:\n map_matching();\n ~map_matching();\n\n void init();\n\n // 地图匹配算法\n int MapMarch_Min_Distance_motion_planning(double Current_X,\n double Current_Y,\n path *p, int length,\n double& min_error);\n\n int find_next_moition_planning_points(path p, double length);\n\n int find_moition_planning_before_points(path p, double length);\n\n int find_moition_planning_start_points(path p, double length);\n\n};\n"
},
{
"alpha_fraction": 0.5832699537277222,
"alphanum_fraction": 0.5931559205055237,
"avg_line_length": 18.62686538696289,
"blob_id": "c5d404c9b665696fd1c62a7d7b61b509c0dc7a10",
"content_id": "eb708b4fce5333241def707d75f60caea15aa22e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1595,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 67,
"path": "/athena/core/arm/Common/include/distributed_runtime/session/nad_session.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_session.h\n * 时 间:2016-03-02\n * 描 述:SESSION(控制块)的基类\n-------------------------------------------------------*/\n#ifndef _NAD_SESSION_H\n#define _NAD_SESSION_H\n\n#include \"../timer/nad_timer.h\"\n\n//SESSION(控制块)的基类\nclass nad_session\n{\npublic:\n //对象名称\n string name;\n\npublic:\n //构造析构函数\n nad_session(string name);\n virtual ~nad_session();\n\n //处理定时器\n virtual void handle_timer();\n};\n\n//SESSION(控制块)管理器的基类\nclass nad_session_list\n{\npublic:\n //SESSION(控制块)列表\n map<string, nad_session*> session_map;\n map<int,nad_session*> session_map1;\n\n //最大session数量\n int max_size;\n\n //定时器\n nad_timer *timer;\n\npublic:\n //构造析构函数,参数(最大session数&0=无限, 定时器间隔&0=不起定时器)\n nad_session_list(int max_size, int64 interval_ms);\n virtual ~nad_session_list();\n\n //获得session数量,禁止重载\n int size();\n\n //新增SESSION(控制块),session->name不能重复!\n virtual int add_session(nad_session *session);\n\n virtual int add_session(nad_session *session,int index);\n\n //查询SESSION(控制块),name不能重复!\n virtual nad_session *find_session(string name);\n\n //删除SESSION(控制块),成功返回ERT_OK\n virtual int delete_session(string name);\n\n //删除所有SESSION(控制块)\n virtual void clear_session();\n\n //处理定时器,通常不用重载\n virtual void handle_timer();\n};\n\n#endif\n"
},
{
"alpha_fraction": 0.6748466491699219,
"alphanum_fraction": 0.7116564512252808,
"avg_line_length": 22.285715103149414,
"blob_id": "de999ba3a79accc94ad4b9bb5e4bf5d5f8eeda51",
"content_id": "8aea9507f439077aaa1b58c5cc7563237bc48d6a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 171,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 7,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/brake/TRUCK_J6P/truck_j6p_deceleration_brake_map.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"truck_j6p_deceleration_brake_map.h\"\n\n double TruckJ6pGetBrakeVaule(double deceleration)\n {\n //return (deceleration*10.0);//仿真时为bar\n return 0;\n }\n"
},
{
"alpha_fraction": 0.6904397010803223,
"alphanum_fraction": 0.705777108669281,
"avg_line_length": 24.402597427368164,
"blob_id": "d21f6287817fac6346738d9765720a07cfc0a236",
"content_id": "5c5dc2f3084b919abaa4d2874328ec600bc8119e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4262,
"license_type": "no_license",
"max_line_length": 174,
"num_lines": 154,
"path": "/athena/core/arm/Control/include/lat_controller/lat_controller.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file lat_controller.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROLLER_LAT_LATCONTROLLER_H_\n#define CONTROLLER_LAT_LATCONTROLLER_H_\n\n#include \"../generic_controller.h\"\n#include <math.h>\n#include \"../common/math_util.h\"\n#include \"../common/kalman_filter.h\"\n#include \"../common/kalman_filter_app.h\"\n\n/**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\n/**\n * @class LatController\n *\n * @brief lateral controller, to compute steering values.\n */\nclass LatController:public GenericController{\n public:\n /**\n * @brief constructor\n */\n LatController() = default;\n\n /**\n * @brief destructor\n */\n ~LatController() = default;\n\n/**\n * @brief init 初始化.\n * @param[in] controller_config controller config.\n * @return true or false.\n */\n bool Init(const ControllerConfig controller_config);\n\n /**\n * @brief ComputeControlOutput 横向控制计算.\n * @param[in] path 轨迹.\n * @param[in] match_point_no 轨迹匹配索引.\n * @param[in] localiation 定位信息.\n * @param[in] chassis 车辆底盘信息.\n * @param[in] ControllerOutput 控制器输出.\n * @return true or false.\n */\n bool ComputeControlOutput(path * path,uint32_t match_point_no,const LocalLocalization * local_localiation,const Chassis *chassis,ControllerOutput * controller_output);\n\n /**\n * @brief SetTarSpeedDebug 设置推荐速度.\n * @param[in] tar_speed 推荐速度.\n * @param[in] valid 是否有效.\n * @return void.\n */\n void SetTarSpeedDebug(int32_t tar_speed,bool valid);\n private:\n ///最大曲率\n const double path_max_ks_ = 0.2;\n const double path_max_ks_u_ = 0.004;\n const double path_max_max_ks_u_ = 0.01;\n\n //const double m_pi = 3.14159265358979323846;\n ///地图匹配的最近点的距离 match_map_err\n double match_map_err_=0.0;\n double derta_fi_radian_=0.0;\n ///转向角速度\n const double steering_angle_speed_ = 300;\n ///方向反馈转角\n double feedback_steer_=0;\n\n ///角度误差\n double derta_fi_ = 0.0;\n double 
fi_err_hybird_ = 0.0;\n ///目标航向角速度误差\n double heading_speed_err;\n\n double kp_;\n double ki_;\n double kd_;\n\n double motion_curvature_last=0;\n\n ControllerConfig controller_config_;\n string start_time;\n\n/**\n * @brief LateralHybridControl 横向控制.\n * @param[in] local_path 轨迹.\n * @param[in] match_point_no 轨迹匹配索引.\n * @param[in] local_localiation 定位信息.\n * @param[in] chassis 车辆底盘信息.\n * @param[in] controller_output 控制输出.\n * @return void.\n */\n void LateralHybridControl(path * local_path,uint32_t match_point_no,const LocalLocalization * local_localiation,const Chassis *chassis,ControllerOutput * controller_output);\n\n/**\n * @brief CalculateFeedwardSteerAngle 计算前馈转角.\n * @param[in] local_path 轨迹.\n * @param[in] motion_curvature 规划层曲率.\n * @param[in] match_point 匹配点.\n * @return void.\n */\n void CalculateFeedwardSteerAngle(path * recv_path, double motion_curvature, int match_point);\n\n /**\n * @brief CalculateMappingParameters 计算位置误差和角度误差.\n * @param[in] p 轨迹.\n * @param[in] match_point 匹配点.\n * @return void.\n */\n void CalculateMappingParameters(path *p, int match_point_no,const LocalLocalization * local_localiation);\n\n double LateralHybridFeedbackControl(path *recv_path, int match_point_no,double e_err, double fi_err,double heading_speed_err);\n\n void LateralOutputSteering(double wheel_angle,double steering_angle_feedback,ControllerOutput * controller_output);\n\n /**\n * @brief CalculateDertaFiByTable 计算角度误差\n * @param[in] current_heading 当前头指向.\n * @param[in] head_c 规划头指向.\n * @param[in] p_derta_fi 头指向.\n * @param[in] p_derta_fi_radian 头指向.\n * @return void.\n */\n void CalculateDertaFiByTable(double current_heading,double head_c,double* p_derta_fi,double* p_derta_fi_radian);\n\n double CalculateErrorSignAndVaule(double x, double y, double yaw,double x_c, double y_c, double yaw_c);\n\n/**\n * @brief 获取调试信息.\n * @param[in] debug_output 调试输出.\n * @return void.\n */\n void GetControllerDebugInfo(DebugOutput &debug_output);\n\n\n\n};\n}//namespace 
control\n}//namespace athena\n#endif //CONTROLLER_LATERAL_LATCONTROLLER_H\n"
},
{
"alpha_fraction": 0.6926605701446533,
"alphanum_fraction": 0.7018348574638367,
"avg_line_length": 13.466666221618652,
"blob_id": "458460f7d49daa342ac5c6c6824b6d646b4fd6a8",
"content_id": "5c41517ed7b52e477711b2b4e62d0546dc5ba12b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 218,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 15,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/gear/TRUCK_J6P/truck_j6p_gear_control.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n#include \"truck_j6p_gear_control.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n\nGearLevel TruckJ6pGearControl::GetGearLevel()\n{\n return POSITION_P;\n}\n}\n}\n"
},
{
"alpha_fraction": 0.6355999708175659,
"alphanum_fraction": 0.6453999876976013,
"avg_line_length": 21.41703987121582,
"blob_id": "bab30657eb2e1c4867b1fd01715d1ae4944c44d9",
"content_id": "a18cb319b8795758d7628b5c26daef699dd89744",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5538,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 223,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/control_logic.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n/**\n * @file control_logic.h\n * @author jiang <[email protected]>\n * @date 2018-07-07\n * @version 1.0.0\n * @par Copyright(c)\n * hy\n */\n\n#ifndef CONTROL_LOGIC_CONTROL_LOGIC_H_\n#define CONTROL_LOGIC_CONTROL_LOGIC_H_\n\n#include \"chassis.h\"\n#include \"trajectory.h\"\n#include \"localization.h\"\n#include \"controller_output.h\"\n#include \"../common/control_cmd.h\"\n#include \"../common/chassis_detail.h\"\n#include \"../common/emergency.h\"\n#include \"../common/Thread.h\"\n#include \"../common/bcm_control_cmd.h\"\n#include \"../common/enum.h\"\n#include \"../common/logging.h\"\n#include \"../common/get_time.h\"\n#include \"../common/logging.h\"\n#include \"../common/control_info_report.h\"\n#include \"controller.h\"\n//#include \"../apps/control_view/control_view.h\"\n#include \"control_logic_config.h\"\n#include \"gear/gear_control.h\"\n\n#include \"acc/cs55/cs55_torque_speed_throttle_map.h\"\n#include \"brake/cs55/cs55_deceleration_brake_map.h\"\n#include \"gear/cs55/cs55_gear_control.h\"\n#include \"control_logic_debug_output.h\"\n\n#include \"acc/TRUCK_J6P/truck_j6p_torque_speed_throttle_map.h\"\n#include \"brake/TRUCK_J6P/truck_j6p_deceleration_brake_map.h\"\n#include \"gear/TRUCK_J6P/truck_j6p_gear_control.h\"\n\n /**\n * @namespace athena::control\n * @brief athena::control\n */\nnamespace athena{\nnamespace control{\n /**\n * @class ControlLogic\n * @brief 控制逻辑类.\n */\nclass ControlLogic:public Thread{\n public:\n ControlLogic() = default;\n ~ControlLogic() = default;\n\n void Init(ControlLogicConfig control_logic_config);\n\n /**\n * @brief 轨迹消息接收.\n * @param[in] trajectory 规划轨迹.\n * @return void.\n */\n void \tSubscribeTrajectory(Trajectory trajectory);\n\n /**\n * @brief 定位信息接收.\n * @param[in] localization 定位信息.\n * @return void.\n */\n void \tSubscribeLocalization(Localization localization);\n\n/**\n * @brief 底盘信息接收.\n * @param[in] chassis 车辆底盘信息.\n * @return void.\n */\n void \tSubscribeChassis(ChassisDetail chassis_detail);\n\n 
/**\n * @brief BCM信息接收.\n * @param[in] chassis 车辆底盘信息.\n * @return void.\n */\n void \tSubscribeBcmControl(BcmControlCmd bcm_control_cmd);\n\n /**\n * @brief 获取控制命令.\n * @param[in] control_cmd 控制命令.\n * @return void.\n */\n void GetControlCmd(ControlCmd &control_cmd);\n\n /**\n * @brief 获取BCM控制命令.\n * @param[in] bcm_control_cmd BCM控制命令.\n * @return void.\n */\n void GetBcmControlCmd(BcmControlCmd &bcm_control_cmd);\n\n /**\n * @brief 获取控制信息.\n * @param[in] control_info_report 控制信息.\n * @return void.\n */\n void GetControlInfoReport(ControlInfoReport &control_info_report);\n\n /**\n * @brief 获取控制器报警信息.\n * @param[in] emergency 紧急事件信息.\n * @return true=紧急事件发横 false=无紧急事件发生.\n */\n bool GetControllerAlarmInfo(Emergency *emergency);\n\n /**\n * @brief 获取调试信息.\n * @param[in] control_logic_debug_output.\n * @return void.\n */\n void GetControlLogicDebugOutput(ControlLogicDebugOutput &control_logic_debug_output);\n\n /**\n * @brief GetControllerInfo.\n * @param[in] debug_output 调试输出.\n * @return void.\n */\n void GetControllerInfo(DebugOutput &debug_output);\n\n /**\n * @brief SetDrivingModeDebug 设置驾驶模式用作调试.\n * @param[in] mode 0 无效 1 人工驾驶 3 自动驾驶.\n * @return void.\n */\n void SetDrivingModeDebug(int32_t mode);\n\n /**\n * @brief SetTarSpeedDebug 设置推荐速度.\n * @param[in] tar_speed 推荐速度.\n * @param[in] valid 是否有效.\n * @return void.\n */\n void SetTarSpeedDebug(int32_t tar_speed,bool valid);\n\n /**\n * @brief GetSteeringAngleFeedback 获取车辆实际转向角.\n * @param[in] void.\n * @return 转向角.\n */\n double GetSteeringAngleFeedback();\n\n ControllerConfig controller_config_;\n ControlLogicConfig control_logic_config_;\n private:\n ///控制器\n Controller controller_;\n ///轨迹\n Trajectory trajectory_;\n ///定位\n Localization localization_;\n ///控制命令\n ControlCmd control_cmd_;\n ///控制器输出\n ControllerOutput controller_output_;\n ///底盘信息\n Chassis chassis_;\n ///底盘详细信息\n ChassisDetail chassis_detail_;\n ///bcm控制信息\n BcmControlCmd bcm_control_cmd_;\n ///驾驶模式\n int32_t driving_mode_;\n ///调试输出\n 
ControlLogicDebugOutput control_logic_debug_output_;\n\n /**\n * @brief 线程运行函数.\n * @return void.\n */\n void run();\n\n /**\n * @brief 计算控制输出.\n * @return void.\n */\n void ComputeControlOutputOnTimer();\n\n ///CS55档位控制器\n CS55GearControl cs55_gear_control_;\n ///J6P档位控制器\n TruckJ6pGearControl truck_j6p_gear_control_;\n ///档位控制\n GearControl *gear_control_;\n\n /**\n * @brief 注册档位控制.\n * @param[in] gear_control 档位控制.\n * @return void.\n */\n void RegisterGearControl(GearControl *gear_control);\n\n /**\n * @brief 获取驾驶模式.\n * @return 驾驶模式参考DrivingMode.\n */\n int32_t GetDrivingMode();\n\n /**\n * @brief 设置驾驶模式.\n * @param[in] driving_mode 驾驶模式.\n * @return void.\n */\n void SetDrivingMode(int driving_mode);\n\n /**\n * @brief 获取驾驶模式.\n * @return DrivingMode 驾驶模式参考DrivingMode.\n */\n void SetControlCmd(ControlCmd *control_cmd,ControllerOutput controller_output);\n\n};\n}\n}\n\n#endif // CONTROL_LOGIC_CONTROL_LOGIC_H_\n"
},
{
"alpha_fraction": 0.6907216310501099,
"alphanum_fraction": 0.6939772367477417,
"avg_line_length": 22.628204345703125,
"blob_id": "a22206042bb5d13cf637c31a0f19422b33d5acf5",
"content_id": "655538075a41589644d94894320b3214a8bf4af0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1844,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 78,
"path": "/athena/core/arm/Map/include/LineStrip.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*\n * © 2014 by Philipp Bender <[email protected]>\n *\n * This file is part of libLanelet.\n *\n * libLanelet is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * libLanelet is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with libLanelet. If not, see <http://www.gnu.org/licenses/>.\n */\n\n#pragma once\n\n#include \"Attribute.hpp\"\n#include \"lanelet_point.hpp\"\n#include \"BoundingBox.hpp\"\n#include <vector>\n#include <memory>\n#include <tuple>\n\nnamespace LLet\n{\n\nclass LineStrip\n{\n\npublic:\n virtual const std::vector< point_with_id_t >& pts() const = 0;\n virtual BoundingBox bb() const;\n};\n\ntypedef std::shared_ptr< LineStrip > strip_ptr_t;\n\nclass OSMLineStrip : public LineStrip\n{\npublic:\n OSMLineStrip();\n AttributeMap _attributes;\n virtual const std::vector< point_with_id_t >& pts() const;\n std::vector< point_with_id_t > _pts;\nprivate:\n\n};\n\nclass CompoundLineStrip : public LineStrip\n{\npublic:\n CompoundLineStrip( const std::vector< std::shared_ptr< LineStrip > > & strips );\n virtual const std::vector< point_with_id_t >& pts() const;\n\nprivate:\n std::vector< point_with_id_t > _pts;\n\n};\n\nclass ReversedLineStrip : public LineStrip\n{\npublic:\n ReversedLineStrip( std::shared_ptr< LineStrip > parent );\n\n virtual const std::vector< point_with_id_t >& pts() const;\n\nprivate:\n const std::shared_ptr< LineStrip > _parent;\n std::vector< point_with_id_t > _pts;\n\n\n};\n\n}\n"
},
{
"alpha_fraction": 0.7246376872062683,
"alphanum_fraction": 0.7463768124580383,
"avg_line_length": 18.571428298950195,
"blob_id": "abfeea026956db89b9a4384685943479566c9681",
"content_id": "b6f853e2390dd4596c5254d7082a6658e2c35f72",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 138,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 7,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/acc/TRUCK_J6P/truck_j6p_torque_speed_throttle_map.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n#include \"truck_j6p_torque_speed_throttle_map.h\"\n\ndouble TruckJ6pGetAccValue(double speed, double accel)\n{\n\t//return accel;\n\treturn 0;\n}\n"
},
{
"alpha_fraction": 0.8231440782546997,
"alphanum_fraction": 0.8245996832847595,
"avg_line_length": 46.91860580444336,
"blob_id": "c21fde8346a4a5ca00413ed8c35b5b8582a19c2c",
"content_id": "d5ed2fa71359071a7c64990f567eb73184d8712c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4122,
"license_type": "no_license",
"max_line_length": 233,
"num_lines": 86,
"path": "/athena/core/x86/Camera/lane_detect/include/LaneDraw.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include <string>\n#include <vector>\n#include \"bean/BaseDefine.h\"\n#include \"utils/colormisc.h\"\n#include \"utils/config.h\"\n#include \"utils/config2.h\"\n#include \"utils/imrgb.h\"\n\n#include \"bean/Lane.h\"\n#include \"utils/roadimage_window.h\"\n#include \"utils/lm_type.h\"\n#include \"utils/matutil-d.h\"\n#include \"main_proc.h\"\n#include \"utils/my_resource.h\"\n#include \"utils/tmc_stereobmp-forMono.h\"\n#include \"bean/LaneMarkerPoints.h\"\n#include \"LaneDetector.h\"\n#include \"utils/type.h\"\n#include \"utils/globalVal.h\"\nusing namespace std;\nclass LaneDraw\n{\npublic:\n int judge_lane_flag = 0;\n vector<point> leftPoints;\n vector<point> rightPoints;\n vector<point> leftImagePoints, rightImagePoints;\n\tBOOL drawBoundaryPointOnRoadImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawDistanceLinesOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawEdgePointsOnRoadOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawGridMap1DFunc(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryAvailableFlagOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryDetectionStatusOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryOnRoadImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryPointsForLaneParamterEstimationOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryPointsForLaneParamterEstimationOnRoadImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryPointsInFarAreaOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryPointsInNearAreaOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryPointsOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL 
drawLaneBoundaryWithLaneParamterOneSideAndLaneMarkerLineSequencesOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\tBOOL drawLaneBoundaryWithLaneParamterCenterLineOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n void drawLaneBoundaryByPreviousFrame(IMRGB *pImrgb);\n IplImage* drawDetectObjectOnInputImage(IplImage* Im_output_,LaneDetector *pLaneDetector);\n\n\tBOOL drawLaneBoundaryWithLaneParamterOneSideOfNearAreaOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryWithLaneParamterOneSideOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneBoundaryWithLaneParamterOneSideOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector, int iLR);\n\tBOOL drawLaneBoundaryWithLaneParamterOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneCenterOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneMarkerLineSequencesOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneMarkerLineSequencesOnRoadImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneMarkerLinesOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneMarkerLinesOnRoadImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneMarkerPointOnRoadImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneMarkerPoints(IMRGB *pImrgb, LaneMarkerPoints *pUpEdgePoints,LaneMarkerPoints *pDownEdgePoints);\n\tBOOL drawLaneMarkersOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawLaneMarkersOnRoadImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawObserverPointOnRoadImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL drawProcLineLinesOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tvoid DrawResultFunc(IMRGB *pImrgb);\n\tBOOL drawSelectedLinesOnInputImage(IMRGB *pImrgb, LaneDetector *pLaneDetector);\n\tBOOL overlay3DImageOnInputImage(IMRGB *pImrgbInput);\n\tBOOL overlayRoadImageOnInputImage(IMRGB 
*pImrgbInput);\n};\n\nclass lane_lateral_LR_point\n{\npublic:\n\n double Left_point;\n double Right_point;\n lane_lateral_LR_point()\n {\n Left_point=0;\n Right_point=0;\n }\n\n};\n\nclass lane_lateral_LR_point_list\n{\n public:\n int point_num;\n std::vector< lane_lateral_LR_point > LR_point_list;\n};\n\n"
},
{
"alpha_fraction": 0.3992016017436981,
"alphanum_fraction": 0.4151696562767029,
"avg_line_length": 16.89285659790039,
"blob_id": "f678c8e8cc2bbf9f2603f74e15cdae50cdbd6a6c",
"content_id": "da30cb7ec8d06f24264c9b86cef78ec7a1a70246",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 635,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 28,
"path": "/athena/core/arm/Common/include/base/log/nad_glog.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:nad_glog.h\n * 创建者:李思政\n * 时 间:2016-03-03\n * 描 述:日志api包含的头文件\n-------------------------------------------------------*/\n#ifndef _NAD_GLOG_H\n#define _NAD_GLOG_H\n\n//初始化日志\nvoid nad_log_init();\n\n//关闭日志\nvoid nad_log_free();\n\n/**\n * 记日志的接口:\n * 普通日志\n * LOG(INFO) << \"error log\";\n * 警告日志\n * LOG(WARNING) << \"error log\";\n * 错误日志\n * LOG(ERROR) << \"error log\";\n * 致命错误日志(调用后程序自动退出)\n * LOG(FATAL) << \"error log\";\n */\n\n#endif\n"
},
{
"alpha_fraction": 0.6112532019615173,
"alphanum_fraction": 0.6202046275138855,
"avg_line_length": 25.965517044067383,
"blob_id": "4a194271ccb43881e07d150cce1bcbcab649b9bc",
"content_id": "90f7b186389a97e0e6d44398c57f254cd41ddce0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1874,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 58,
"path": "/athena/core/x86/Camera/vision_ssd_detect/include/vision_detect_node.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/// //////////////////////////////////////////////\n///@file 加载caffe-ssd网络模型和权重文件\n///@brief 读取图片方法 目标框列表 相机自身配置参数\n///@author duohaoxue\n///@version v1.0\n///@date 2018.07.17\n/// //////////////////////////////////////////////\n#pragma once\n#include \"../include/vision_detector.hpp\"\n#include \"../include/camera_obj_list.hpp\"\n#include \"../include/distance_calculation.hpp\"\nusing namespace caffe;\nusing namespace std;\n\nclass vision_detect\n{\npublic:\n ///卷积特征目标检测 Caffe based Object Detection ConvNet\n\tDetector* ssd_detector_;\n ///当前帧图片得到多个目标列表\n camera_objs_list get_camera_objs;\n ///计算相机到目标物体距离\n Calculate_distance cal_distance;\n ///过滤所需置信度最低阀值 The minimum score required to filter the detected objects by the ConvNet\n\tfloat min_score_threshold;\n\t///选用GPU的id 编号 If GPU is enabled, stores the GPU Device to use\n\tunsigned int gpu_device_id;\n\t///是否用GPU Sets whether or not use GPU acceleration\n\tbool use_gpu;\n /// 是否显示出BoundingBox\n bool draw_flag;\n /// 输入均值文件\n string mean_file_;\n /// 图片均值像素\n string pixel_mean_;\n ///网络模型文件\n string network_Model ;\n /// 训练权重文件\n string pretrained_Weights ;\n /// 处理图片Mat类\n cv::Mat image;\n\npublic:\n /// 加载配置参数\n void read_config_value_from_file();\n ///匹配相交面积比IOU\n float bboxOverlap(cv::Rect &box1,cv::Rect &box2);\n /// Bounding box 欧式距离\n float bboxDistance(cv::Rect &box1,cv::Rect &box2);\n /// 卷积特征转化为图片目标框\n void convert_rect_to_image_obj();\n /// 画出图像目标框\n void Draw_obj_from_image();\n /// 析够函数\n ~vision_detect();\n\n\t vision_detect();\n};\n"
},
{
"alpha_fraction": 0.5709359645843506,
"alphanum_fraction": 0.5788177251815796,
"avg_line_length": 22.06818199157715,
"blob_id": "343b6f68f13fb6f8fe6ebaed6a6d0a57f286b08e",
"content_id": "54846f329a9cd2dac766fdcc32439a0ede337461",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2030,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 88,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneMarkerLineSequence.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include \"../utils/config.h\"\n#include \"../utils/tmc_stereobmp-forMono.h\"\n#include \"LaneDetectorTools.h\"\n#include \"LaneMarkerLine.h\"\n\nclass LaneMarkerLineSequence\n{\nprivate:\n int _iUD;\t// UD_UP or UD_DOWN\n BOOL\t_bOn3DPlaneBoundary;\n double _adAverageHeight[2];\n LaneMarkerLine *_pLML[NF_NUM];\n\npublic:\n inline LaneMarkerLineSequence(void)\n {\n _iUD = -1;\n _bOn3DPlaneBoundary = FALSE;\n _adAverageHeight[0] = 0;\n _adAverageHeight[1] = 0;\n _pLML[NF_NEAR] = NULL;\n _pLML[NF_FAR] = NULL;\n }\n inline LaneMarkerLineSequence(LaneMarkerLineSequence *p)\n {\n UD(p->UD());\n setFlagOn3DPlaneBoundary(p->getFlagOn3DPlaneBoundary());\n for(int iNF = 0; iNF < NF_NUM; iNF++)\n {\n LaneMarkerLine *pLML = p->getLaneMarkerLine(iNF);\n _pLML[iNF] = new LaneMarkerLine(pLML);\n }\n }\n inline ~LaneMarkerLineSequence(void)\n {\n for(int iNF = 0; iNF < NF_NUM; iNF++)\n {\n SAFE_DELETE(_pLML[iNF]);\n }\n }\n inline int UD(void)\n {\n return _iUD;\n }\n inline void UD(int iUD)\n {\n _iUD = iUD;\n }\n inline LaneMarkerLine\t*Near(void)\n {\n return _pLML[NF_NEAR];\n }\n inline LaneMarkerLine\t*Far(void)\n {\n return _pLML[NF_FAR];\n }\n inline void\tNear(LaneMarkerLine *p)\n {\n _pLML[NF_NEAR] = p;\n }\n inline void\tFar(LaneMarkerLine *p)\n {\n _pLML[NF_FAR] = p;\n }\n inline LaneMarkerLine *getLaneMarkerLine(int iNF)\n {\n return _pLML[iNF] ;\n }\n inline void setLaneMarkerLine(int iNF, LaneMarkerLine *pLML)\n {\n _pLML[iNF] = pLML;\n }\n inline BOOL getFlagOn3DPlaneBoundary(void)\n {\n return\t_bOn3DPlaneBoundary;\n }\n inline void setFlagOn3DPlaneBoundary(BOOL bV)\n {\n _bOn3DPlaneBoundary = bV;\n }\n inline double *getAverageHeight(void)\n {\n return _adAverageHeight;\n }\n};\n\ntypedef LaneMarkerLineSequence * ptrLaneMarkerLineSequence;\n"
},
{
"alpha_fraction": 0.25510644912719727,
"alphanum_fraction": 0.2559047043323517,
"avg_line_length": 48.79179763793945,
"blob_id": "cca4e742b4da2d1490e187230ccde834f192b54a",
"content_id": "e12ec1f191ee88cd869d77534aa7590a5005cea9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 78920,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 1585,
"path": "/athena/core/directoryList.md",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "|-- athena\n |-- arm\n | |-- Common\n | | |-- readme\n | | |-- include\n | | | |-- base\n | | | | |-- nad_base.h\n | | | | |-- nad_enum.h\n | | | | |-- nad_function.h\n | | | | |-- nad_retcode.h\n | | | | |-- nad_type.h\n | | | | |-- config\n | | | | | |-- Config.h\n | | | | | |-- nad_config.h\n | | | | | |-- route_config.h\n | | | | |-- db\n | | | | | |-- nad_db.h\n | | | | |-- log\n | | | | | |-- nad_glog.h\n | | | | |-- xml\n | | | | |-- pugiconfig.hpp\n | | | | |-- pugixml.hpp\n | | | |-- distributed_runtime\n | | | | |-- info\n | | | | | |-- nad_info.h\n | | | | | |-- nad_speed.h\n | | | | |-- session\n | | | | | |-- nad_session.h\n | | | | |-- starter\n | | | | | |-- nad_starter.h\n | | | | |-- timer\n | | | | |-- nad_timer.h\n | | | |-- oam\n | | | | |-- task\n | | | | |-- nad_task_func.h\n | | | | |-- nad_task_userfunc.h\n | | | |-- route\n | | | |-- LocalGeographicCS.hpp\n | | | |-- convert_coordinates.hpp\n | | | |-- heading.h\n | | | |-- math_util.h\n | | |-- lib\n | | |-- libcommon.so\n | |-- Control\n | | |-- include\n | | | |-- chassis.h\n | | | |-- controller.h\n | | | |-- controller_agent.h\n | | | |-- controller_alarm_code.h\n | | | |-- controller_config.h\n | | | |-- controller_output.h\n | | | |-- controller_output_alarm.h\n | | | |-- controller_output_alarm_code.h\n | | | |-- debug_output.h\n | | | |-- gear_position.h\n | | | |-- generic_controller.h\n | | | |-- local_localization.h\n | | | |-- localization.h\n | | | |-- localization_.h\n | | | |-- nav_points.h\n | | | |-- navi_point.h\n | | | |-- scheduler.h\n | | | |-- script.sh\n | | | |-- trajectory.h\n | | | |-- common\n | | | | |-- LocalGeographicCS.hpp\n | | | | |-- cputime.h\n | | | | |-- interpolation_1d.h\n | | | | |-- interpolation_2d.h\n | | | | |-- kalman_filter.h\n | | | | |-- kalman_filter_app.h\n | | | | |-- math_util.h\n | | | | |-- navi_point.h\n | | | | |-- path.h\n | | | | |-- eigen3\n | | | | | |-- signature_of_eigen3_matrix_library\n | | | | | |-- Eigen\n | | | | | 
| |-- Cholesky\n | | | | | | |-- CholmodSupport\n | | | | | | |-- Core\n | | | | | | |-- Dense\n | | | | | | |-- Eigen\n | | | | | | |-- Eigenvalues\n | | | | | | |-- Geometry\n | | | | | | |-- Householder\n | | | | | | |-- IterativeLinearSolvers\n | | | | | | |-- Jacobi\n | | | | | | |-- LU\n | | | | | | |-- MetisSupport\n | | | | | | |-- OrderingMethods\n | | | | | | |-- PaStiXSupport\n | | | | | | |-- PardisoSupport\n | | | | | | |-- QR\n | | | | | | |-- QtAlignedMalloc\n | | | | | | |-- SPQRSupport\n | | | | | | |-- SVD\n | | | | | | |-- Sparse\n | | | | | | |-- SparseCholesky\n | | | | | | |-- SparseCore\n | | | | | | |-- SparseLU\n | | | | | | |-- SparseQR\n | | | | | | |-- StdDeque\n | | | | | | |-- StdList\n | | | | | | |-- StdVector\n | | | | | | |-- SuperLUSupport\n | | | | | | |-- UmfPackSupport\n | | | | | | |-- src\n | | | | | | |-- Cholesky\n | | | | | | | |-- LDLT.h\n | | | | | | | |-- LLT.h\n | | | | | | | |-- LLT_MKL.h\n | | | | | | |-- CholmodSupport\n | | | | | | | |-- CholmodSupport.h\n | | | | | | |-- Core\n | | | | | | | |-- Array.h\n | | | | | | | |-- ArrayBase.h\n | | | | | | | |-- ArrayWrapper.h\n | | | | | | | |-- Assign.h\n | | | | | | | |-- AssignEvaluator.h\n | | | | | | | |-- Assign_MKL.h\n | | | | | | | |-- BandMatrix.h\n | | | | | | | |-- Block.h\n | | | | | | | |-- BooleanRedux.h\n | | | | | | | |-- CommaInitializer.h\n | | | | | | | |-- CoreEvaluators.h\n | | | | | | | |-- CoreIterators.h\n | | | | | | | |-- CwiseBinaryOp.h\n | | | | | | | |-- CwiseNullaryOp.h\n | | | | | | | |-- CwiseUnaryOp.h\n | | | | | | | |-- CwiseUnaryView.h\n | | | | | | | |-- DenseBase.h\n | | | | | | | |-- DenseCoeffsBase.h\n | | | | | | | |-- DenseStorage.h\n | | | | | | | |-- Diagonal.h\n | | | | | | | |-- DiagonalMatrix.h\n | | | | | | | |-- DiagonalProduct.h\n | | | | | | | |-- Dot.h\n | | | | | | | |-- EigenBase.h\n | | | | | | | |-- ForceAlignedAccess.h\n | | | | | | | |-- Fuzzy.h\n | | | | | | | |-- GeneralProduct.h\n | | | | | | | |-- 
GenericPacketMath.h\n | | | | | | | |-- GlobalFunctions.h\n | | | | | | | |-- IO.h\n | | | | | | | |-- Inverse.h\n | | | | | | | |-- Map.h\n | | | | | | | |-- MapBase.h\n | | | | | | | |-- MathFunctions.h\n | | | | | | | |-- Matrix.h\n | | | | | | | |-- MatrixBase.h\n | | | | | | | |-- NestByValue.h\n | | | | | | | |-- NoAlias.h\n | | | | | | | |-- NumTraits.h\n | | | | | | | |-- PermutationMatrix.h\n | | | | | | | |-- PlainObjectBase.h\n | | | | | | | |-- Product.h\n | | | | | | | |-- ProductEvaluators.h\n | | | | | | | |-- Random.h\n | | | | | | | |-- Redux.h\n | | | | | | | |-- Ref.h\n | | | | | | | |-- Replicate.h\n | | | | | | | |-- ReturnByValue.h\n | | | | | | | |-- Reverse.h\n | | | | | | | |-- Select.h\n | | | | | | | |-- SelfAdjointView.h\n | | | | | | | |-- SelfCwiseBinaryOp.h\n | | | | | | | |-- Solve.h\n | | | | | | | |-- SolveTriangular.h\n | | | | | | | |-- SolverBase.h\n | | | | | | | |-- SpecialFunctions.h\n | | | | | | | |-- StableNorm.h\n | | | | | | | |-- Stride.h\n | | | | | | | |-- Swap.h\n | | | | | | | |-- Transpose.h\n | | | | | | | |-- Transpositions.h\n | | | | | | | |-- TriangularMatrix.h\n | | | | | | | |-- VectorBlock.h\n | | | | | | | |-- VectorwiseOp.h\n | | | | | | | |-- Visitor.h\n | | | | | | | |-- arch\n | | | | | | | | |-- AVX\n | | | | | | | | | |-- Complex.h\n | | | | | | | | | |-- MathFunctions.h\n | | | | | | | | | |-- PacketMath.h\n | | | | | | | | | |-- TypeCasting.h\n | | | | | | | | |-- AltiVec\n | | | | | | | | | |-- Complex.h\n | | | | | | | | | |-- MathFunctions.h\n | | | | | | | | | |-- PacketMath.h\n | | | | | | | | |-- CUDA\n | | | | | | | | | |-- MathFunctions.h\n | | | | | | | | | |-- PacketMath.h\n | | | | | | | | |-- Default\n | | | | | | | | | |-- Settings.h\n | | | | | | | | |-- NEON\n | | | | | | | | | |-- Complex.h\n | | | | | | | | | |-- MathFunctions.h\n | | | | | | | | | |-- PacketMath.h\n | | | | | | | | |-- SSE\n | | | | | | | | |-- Complex.h\n | | | | | | | | |-- MathFunctions.h\n | | | | | | | | |-- 
PacketMath.h\n | | | | | | | | |-- TypeCasting.h\n | | | | | | | |-- functors\n | | | | | | | | |-- AssignmentFunctors.h\n | | | | | | | | |-- BinaryFunctors.h\n | | | | | | | | |-- NullaryFunctors.h\n | | | | | | | | |-- StlFunctors.h\n | | | | | | | | |-- UnaryFunctors.h\n | | | | | | | |-- products\n | | | | | | | | |-- GeneralBlockPanelKernel.h\n | | | | | | | | |-- GeneralMatrixMatrix.h\n | | | | | | | | |-- GeneralMatrixMatrixTriangular.h\n | | | | | | | | |-- GeneralMatrixMatrixTriangular_MKL.h\n | | | | | | | | |-- GeneralMatrixMatrix_MKL.h\n | | | | | | | | |-- GeneralMatrixVector.h\n | | | | | | | | |-- GeneralMatrixVector_MKL.h\n | | | | | | | | |-- Parallelizer.h\n | | | | | | | | |-- SelfadjointMatrixMatrix.h\n | | | | | | | | |-- SelfadjointMatrixMatrix_MKL.h\n | | | | | | | | |-- SelfadjointMatrixVector.h\n | | | | | | | | |-- SelfadjointMatrixVector_MKL.h\n | | | | | | | | |-- SelfadjointProduct.h\n | | | | | | | | |-- SelfadjointRank2Update.h\n | | | | | | | | |-- TriangularMatrixMatrix.h\n | | | | | | | | |-- TriangularMatrixMatrix_MKL.h\n | | | | | | | | |-- TriangularMatrixVector.h\n | | | | | | | | |-- TriangularMatrixVector_MKL.h\n | | | | | | | | |-- TriangularSolverMatrix.h\n | | | | | | | | |-- TriangularSolverMatrix_MKL.h\n | | | | | | | | |-- TriangularSolverVector.h\n | | | | | | | |-- util\n | | | | | | | |-- BlasUtil.h\n | | | | | | | |-- Constants.h\n | | | | | | | |-- DisableStupidWarnings.h\n | | | | | | | |-- ForwardDeclarations.h\n | | | | | | | |-- MKL_support.h\n | | | | | | | |-- Macros.h\n | | | | | | | |-- Memory.h\n | | | | | | | |-- Meta.h\n | | | | | | | |-- NonMPL2.h\n | | | | | | | |-- ReenableStupidWarnings.h\n | | | | | | | |-- StaticAssert.h\n | | | | | | | |-- XprHelper.h\n | | | | | | |-- Eigenvalues\n | | | | | | | |-- ComplexEigenSolver.h\n | | | | | | | |-- ComplexSchur.h\n | | | | | | | |-- ComplexSchur_MKL.h\n | | | | | | | |-- EigenSolver.h\n | | | | | | | |-- GeneralizedEigenSolver.h\n | | | | | | | |-- 
GeneralizedSelfAdjointEigenSolver.h\n | | | | | | | |-- HessenbergDecomposition.h\n | | | | | | | |-- MatrixBaseEigenvalues.h\n | | | | | | | |-- RealQZ.h\n | | | | | | | |-- RealSchur.h\n | | | | | | | |-- RealSchur_MKL.h\n | | | | | | | |-- SelfAdjointEigenSolver.h\n | | | | | | | |-- SelfAdjointEigenSolver_MKL.h\n | | | | | | | |-- Tridiagonalization.h\n | | | | | | |-- Geometry\n | | | | | | | |-- AlignedBox.h\n | | | | | | | |-- AngleAxis.h\n | | | | | | | |-- EulerAngles.h\n | | | | | | | |-- Homogeneous.h\n | | | | | | | |-- Hyperplane.h\n | | | | | | | |-- OrthoMethods.h\n | | | | | | | |-- ParametrizedLine.h\n | | | | | | | |-- Quaternion.h\n | | | | | | | |-- Rotation2D.h\n | | | | | | | |-- RotationBase.h\n | | | | | | | |-- Scaling.h\n | | | | | | | |-- Transform.h\n | | | | | | | |-- Translation.h\n | | | | | | | |-- Umeyama.h\n | | | | | | | |-- arch\n | | | | | | | |-- Geometry_SSE.h\n | | | | | | |-- Householder\n | | | | | | | |-- BlockHouseholder.h\n | | | | | | | |-- Householder.h\n | | | | | | | |-- HouseholderSequence.h\n | | | | | | |-- IterativeLinearSolvers\n | | | | | | | |-- BasicPreconditioners.h\n | | | | | | | |-- BiCGSTAB.h\n | | | | | | | |-- ConjugateGradient.h\n | | | | | | | |-- IncompleteCholesky.h\n | | | | | | | |-- IncompleteLUT.h\n | | | | | | | |-- IterativeSolverBase.h\n | | | | | | | |-- LeastSquareConjugateGradient.h\n | | | | | | | |-- SolveWithGuess.h\n | | | | | | |-- Jacobi\n | | | | | | | |-- Jacobi.h\n | | | | | | |-- LU\n | | | | | | | |-- Determinant.h\n | | | | | | | |-- FullPivLU.h\n | | | | | | | |-- InverseImpl.h\n | | | | | | | |-- PartialPivLU.h\n | | | | | | | |-- PartialPivLU_MKL.h\n | | | | | | | |-- arch\n | | | | | | | |-- Inverse_SSE.h\n | | | | | | |-- MetisSupport\n | | | | | | | |-- MetisSupport.h\n | | | | | | |-- OrderingMethods\n | | | | | | | |-- Amd.h\n | | | | | | | |-- Eigen_Colamd.h\n | | | | | | | |-- Ordering.h\n | | | | | | |-- PaStiXSupport\n | | | | | | | |-- PaStiXSupport.h\n | | | | | 
| |-- PardisoSupport\n | | | | | | | |-- PardisoSupport.h\n | | | | | | |-- QR\n | | | | | | | |-- ColPivHouseholderQR.h\n | | | | | | | |-- ColPivHouseholderQR_MKL.h\n | | | | | | | |-- FullPivHouseholderQR.h\n | | | | | | | |-- HouseholderQR.h\n | | | | | | | |-- HouseholderQR_MKL.h\n | | | | | | |-- SPQRSupport\n | | | | | | | |-- SuiteSparseQRSupport.h\n | | | | | | |-- SVD\n | | | | | | | |-- BDCSVD.h\n | | | | | | | |-- JacobiSVD.h\n | | | | | | | |-- JacobiSVD_MKL.h\n | | | | | | | |-- SVDBase.h\n | | | | | | | |-- UpperBidiagonalization.h\n | | | | | | |-- SparseCholesky\n | | | | | | | |-- SimplicialCholesky.h\n | | | | | | | |-- SimplicialCholesky_impl.h\n | | | | | | |-- SparseCore\n | | | | | | | |-- AmbiVector.h\n | | | | | | | |-- CompressedStorage.h\n | | | | | | | |-- ConservativeSparseSparseProduct.h\n | | | | | | | |-- MappedSparseMatrix.h\n | | | | | | | |-- SparseAssign.h\n | | | | | | | |-- SparseBlock.h\n | | | | | | | |-- SparseColEtree.h\n | | | | | | | |-- SparseCompressedBase.h\n | | | | | | | |-- SparseCwiseBinaryOp.h\n | | | | | | | |-- SparseCwiseUnaryOp.h\n | | | | | | | |-- SparseDenseProduct.h\n | | | | | | | |-- SparseDiagonalProduct.h\n | | | | | | | |-- SparseDot.h\n | | | | | | | |-- SparseFuzzy.h\n | | | | | | | |-- SparseMap.h\n | | | | | | | |-- SparseMatrix.h\n | | | | | | | |-- SparseMatrixBase.h\n | | | | | | | |-- SparsePermutation.h\n | | | | | | | |-- SparseProduct.h\n | | | | | | | |-- SparseRedux.h\n | | | | | | | |-- SparseRef.h\n | | | | | | | |-- SparseSelfAdjointView.h\n | | | | | | | |-- SparseSolverBase.h\n | | | | | | | |-- SparseSparseProductWithPruning.h\n | | | | | | | |-- SparseTranspose.h\n | | | | | | | |-- SparseTriangularView.h\n | | | | | | | |-- SparseUtil.h\n | | | | | | | |-- SparseVector.h\n | | | | | | | |-- SparseView.h\n | | | | | | | |-- TriangularSolver.h\n | | | | | | |-- SparseLU\n | | | | | | | |-- SparseLU.h\n | | | | | | | |-- SparseLUImpl.h\n | | | | | | | |-- SparseLU_Memory.h\n | | | | 
| | | |-- SparseLU_Structs.h\n | | | | | | | |-- SparseLU_SupernodalMatrix.h\n | | | | | | | |-- SparseLU_Utils.h\n | | | | | | | |-- SparseLU_column_bmod.h\n | | | | | | | |-- SparseLU_column_dfs.h\n | | | | | | | |-- SparseLU_copy_to_ucol.h\n | | | | | | | |-- SparseLU_gemm_kernel.h\n | | | | | | | |-- SparseLU_heap_relax_snode.h\n | | | | | | | |-- SparseLU_kernel_bmod.h\n | | | | | | | |-- SparseLU_panel_bmod.h\n | | | | | | | |-- SparseLU_panel_dfs.h\n | | | | | | | |-- SparseLU_pivotL.h\n | | | | | | | |-- SparseLU_pruneL.h\n | | | | | | | |-- SparseLU_relax_snode.h\n | | | | | | |-- SparseQR\n | | | | | | | |-- SparseQR.h\n | | | | | | |-- StlSupport\n | | | | | | | |-- StdDeque.h\n | | | | | | | |-- StdList.h\n | | | | | | | |-- StdVector.h\n | | | | | | | |-- details.h\n | | | | | | |-- SuperLUSupport\n | | | | | | | |-- SuperLUSupport.h\n | | | | | | |-- UmfPackSupport\n | | | | | | | |-- UmfPackSupport.h\n | | | | | | |-- misc\n | | | | | | | |-- Image.h\n | | | | | | | |-- Kernel.h\n | | | | | | | |-- blas.h\n | | | | | | |-- plugins\n | | | | | | |-- ArrayCwiseBinaryOps.h\n | | | | | | |-- ArrayCwiseUnaryOps.h\n | | | | | | |-- BlockMethods.h\n | | | | | | |-- CommonCwiseBinaryOps.h\n | | | | | | |-- CommonCwiseUnaryOps.h\n | | | | | | |-- MatrixCwiseBinaryOps.h\n | | | | | | |-- MatrixCwiseUnaryOps.h\n | | | | | |-- unsupported\n | | | | | |-- Eigen\n | | | | | |-- AdolcForward\n | | | | | |-- AlignedVector3\n | | | | | |-- ArpackSupport\n | | | | | |-- AutoDiff\n | | | | | |-- BVH\n | | | | | |-- FFT\n | | | | | |-- IterativeSolvers\n | | | | | |-- KroneckerProduct\n | | | | | |-- LevenbergMarquardt\n | | | | | |-- MPRealSupport\n | | | | | |-- MatrixFunctions\n | | | | | |-- MoreVectorization\n | | | | | |-- NonLinearOptimization\n | | | | | |-- NumericalDiff\n | | | | | |-- OpenGLSupport\n | | | | | |-- Polynomials\n | | | | | |-- Skyline\n | | | | | |-- SparseExtra\n | | | | | |-- Splines\n | | | | | |-- CXX11\n | | | | | | |-- Core\n | | | | | | 
|-- Tensor\n | | | | | | |-- TensorSymmetry\n | | | | | | |-- src\n | | | | | | |-- Core\n | | | | | | | |-- util\n | | | | | | | |-- CXX11Meta.h\n | | | | | | | |-- CXX11Workarounds.h\n | | | | | | | |-- EmulateArray.h\n | | | | | | | |-- EmulateCXX11Meta.h\n | | | | | | |-- Tensor\n | | | | | | | |-- Tensor.h\n | | | | | | | |-- TensorArgMax.h\n | | | | | | | |-- TensorAssign.h\n | | | | | | | |-- TensorBase.h\n | | | | | | | |-- TensorBroadcasting.h\n | | | | | | | |-- TensorChipping.h\n | | | | | | | |-- TensorConcatenation.h\n | | | | | | | |-- TensorContraction.h\n | | | | | | | |-- TensorContractionCuda.h\n | | | | | | | |-- TensorContractionThreadPool.h\n | | | | | | | |-- TensorConversion.h\n | | | | | | | |-- TensorConvolution.h\n | | | | | | | |-- TensorCustomOp.h\n | | | | | | | |-- TensorDevice.h\n | | | | | | | |-- TensorDeviceCuda.h\n | | | | | | | |-- TensorDeviceDefault.h\n | | | | | | | |-- TensorDeviceThreadPool.h\n | | | | | | | |-- TensorDimensionList.h\n | | | | | | | |-- TensorDimensions.h\n | | | | | | | |-- TensorEvalTo.h\n | | | | | | | |-- TensorEvaluator.h\n | | | | | | | |-- TensorExecutor.h\n | | | | | | | |-- TensorExpr.h\n | | | | | | | |-- TensorFFT.h\n | | | | | | | |-- TensorFixedSize.h\n | | | | | | | |-- TensorForcedEval.h\n | | | | | | | |-- TensorForwardDeclarations.h\n | | | | | | | |-- TensorFunctors.h\n | | | | | | | |-- TensorGenerator.h\n | | | | | | | |-- TensorIO.h\n | | | | | | | |-- TensorImagePatch.h\n | | | | | | | |-- TensorIndexList.h\n | | | | | | | |-- TensorInflation.h\n | | | | | | | |-- TensorInitializer.h\n | | | | | | | |-- TensorIntDiv.h\n | | | | | | | |-- TensorLayoutSwap.h\n | | | | | | | |-- TensorMacros.h\n | | | | | | | |-- TensorMap.h\n | | | | | | | |-- TensorMeta.h\n | | | | | | | |-- TensorMorphing.h\n | | | | | | | |-- TensorPadding.h\n | | | | | | | |-- TensorPatch.h\n | | | | | | | |-- TensorReduction.h\n | | | | | | | |-- TensorReductionCuda.h\n | | | | | | | |-- TensorRef.h\n | | | | | | | 
|-- TensorReverse.h\n | | | | | | | |-- TensorShuffling.h\n | | | | | | | |-- TensorStorage.h\n | | | | | | | |-- TensorStriding.h\n | | | | | | | |-- TensorTraits.h\n | | | | | | | |-- TensorUInt128.h\n | | | | | | | |-- TensorVolumePatch.h\n | | | | | | |-- TensorSymmetry\n | | | | | | |-- DynamicSymmetry.h\n | | | | | | |-- StaticSymmetry.h\n | | | | | | |-- Symmetry.h\n | | | | | | |-- util\n | | | | | | |-- TemplateGroupTheory.h\n | | | | | |-- src\n | | | | | |-- AutoDiff\n | | | | | | |-- AutoDiffJacobian.h\n | | | | | | |-- AutoDiffScalar.h\n | | | | | | |-- AutoDiffVector.h\n | | | | | |-- BVH\n | | | | | | |-- BVAlgorithms.h\n | | | | | | |-- KdBVH.h\n | | | | | |-- Eigenvalues\n | | | | | | |-- ArpackSelfAdjointEigenSolver.h\n | | | | | |-- FFT\n | | | | | | |-- ei_fftw_impl.h\n | | | | | | |-- ei_kissfft_impl.h\n | | | | | |-- IterativeSolvers\n | | | | | | |-- ConstrainedConjGrad.h\n | | | | | | |-- DGMRES.h\n | | | | | | |-- GMRES.h\n | | | | | | |-- IncompleteLU.h\n | | | | | | |-- IterationController.h\n | | | | | | |-- MINRES.h\n | | | | | | |-- Scaling.h\n | | | | | |-- KroneckerProduct\n | | | | | | |-- KroneckerTensorProduct.h\n | | | | | |-- LevenbergMarquardt\n | | | | | | |-- LMcovar.h\n | | | | | | |-- LMonestep.h\n | | | | | | |-- LMpar.h\n | | | | | | |-- LMqrsolv.h\n | | | | | | |-- LevenbergMarquardt.h\n | | | | | |-- MatrixFunctions\n | | | | | | |-- MatrixExponential.h\n | | | | | | |-- MatrixFunction.h\n | | | | | | |-- MatrixLogarithm.h\n | | | | | | |-- MatrixPower.h\n | | | | | | |-- MatrixSquareRoot.h\n | | | | | | |-- StemFunction.h\n | | | | | |-- MoreVectorization\n | | | | | | |-- MathFunctions.h\n | | | | | |-- NonLinearOptimization\n | | | | | | |-- HybridNonLinearSolver.h\n | | | | | | |-- LevenbergMarquardt.h\n | | | | | | |-- chkder.h\n | | | | | | |-- covar.h\n | | | | | | |-- dogleg.h\n | | | | | | |-- fdjac1.h\n | | | | | | |-- lmpar.h\n | | | | | | |-- qrsolv.h\n | | | | | | |-- r1mpyq.h\n | | | | | | |-- r1updt.h\n | 
| | | | | |-- rwupdt.h\n | | | | | |-- NumericalDiff\n | | | | | | |-- NumericalDiff.h\n | | | | | |-- Polynomials\n | | | | | | |-- Companion.h\n | | | | | | |-- PolynomialSolver.h\n | | | | | | |-- PolynomialUtils.h\n | | | | | |-- Skyline\n | | | | | | |-- SkylineInplaceLU.h\n | | | | | | |-- SkylineMatrix.h\n | | | | | | |-- SkylineMatrixBase.h\n | | | | | | |-- SkylineProduct.h\n | | | | | | |-- SkylineStorage.h\n | | | | | | |-- SkylineUtil.h\n | | | | | |-- SparseExtra\n | | | | | | |-- BlockOfDynamicSparseMatrix.h\n | | | | | | |-- BlockSparseMatrix.h\n | | | | | | |-- DynamicSparseMatrix.h\n | | | | | | |-- MarketIO.h\n | | | | | | |-- MatrixMarketIterator.h\n | | | | | | |-- RandomSetter.h\n | | | | | |-- Splines\n | | | | | |-- Spline.h\n | | | | | |-- SplineFitting.h\n | | | | | |-- SplineFwd.h\n | | | | |-- filters\n | | | | | |-- digital_filter.cc\n | | | | | |-- digital_filter.h\n | | | | | |-- digital_filter_coefficients.cc\n | | | | | |-- digital_filter_coefficients.h\n | | | | | |-- mean_filter.cc\n | | | | | |-- mean_filter.h\n | | | | |-- map_matching\n | | | | | |-- LocalGeographicCS.hpp\n | | | | | |-- circle.h\n | | | | | |-- convert_coordinates.hpp\n | | | | | |-- coordinate_transformation.h\n | | | | | |-- cs.h\n | | | | | |-- heading.h\n | | | | | |-- localization_.h\n | | | | | |-- map_matching.h\n | | | | | |-- navi_point.h\n | | | | | |-- point.h\n | | | | | |-- spline.h\n | | | | | |-- steering_angle.h\n | | | | |-- math\n | | | | |-- linear_quadratic_regulator.cc\n | | | | |-- linear_quadratic_regulator.h\n | | | | |-- math_utils.cc\n | | | | |-- math_utils.h\n | | | | |-- vec2d.cc\n | | | | |-- vec2d.h\n | | | |-- lat_controller\n | | | | |-- lat_controller.h\n | | | |-- lon_controller\n | | | | |-- lon_controller.h\n | | | | |-- vehicle_dynamics.h\n | | | |-- lqr_controller\n | | | | |-- lqr_lat_controller.h\n | | | | |-- simple_lateral_debug.h\n | | | |-- pid\n | | | |-- pid_controller.h\n | | |-- lib\n | | |-- libcontroller.so\n | 
|-- Map\n | | |-- readme\n | | |-- include\n | | | |-- Attribute.hpp\n | | | |-- BoundingBox.hpp\n | | | |-- CompoundLanelet.hpp\n | | | |-- LLTree.hpp\n | | | |-- Lanelet.hpp\n | | | |-- LaneletBase.hpp\n | | | |-- LaneletFwd.hpp\n | | | |-- LaneletGraph.hpp\n | | | |-- LaneletMap.hpp\n | | | |-- LineStrip.hpp\n | | | |-- MapData.h\n | | | |-- MapInterface.h\n | | | |-- RTree.h\n | | | |-- RegulatoryElement.hpp\n | | | |-- RoadMap.h\n | | | |-- lanelet_point.hpp\n | | | |-- llet_xml.hpp\n | | | |-- mercator.hpp\n | | | |-- normalize_angle.hpp\n | | | |-- prettyprint.hpp\n | | | |-- regulator.h\n | | |-- lib\n | | |-- libroad_map.so\n | |-- Navi\n | | |-- readme\n | | |-- include\n | | | |-- route.h\n | | | |-- route_data.h\n | | |-- lib\n | | |-- libroute.so\n | |-- Planning\n | |-- include\n | | |-- collision_check\n | | | |-- collision_check.h\n | | |-- common\n | | | |-- LocalGeographicCS.hpp\n | | | |-- car_state.h\n | | | |-- color_util.h\n | | | |-- convert_coordinates.hpp\n | | | |-- cs.h\n | | | |-- enum_list.h\n | | | |-- math_util.h\n | | | |-- navi_point.h\n | | | |-- path.h\n | | | |-- path_tools.h\n | | | |-- point.h\n | | | |-- rect.h\n | | |-- map_matching\n | | | |-- map_matching.h\n | | |-- park\n | | | |-- park.h\n | | |-- planning\n | | | |-- planning.h\n | | | |-- planning_output.h\n | | | |-- planning_param.h\n | | | |-- route_data.h\n | | |-- spline\n | | | |-- math_tools.h\n | | | |-- quartic_spline.h\n | | | |-- quintic_spline.h\n | | | |-- spline.h\n | | |-- trajectory\n | | | |-- trajectory.h\n | | | |-- trajectory_sets.h\n | | |-- vehicle_dynamic\n | | |-- cau_heading_steering.h\n | | |-- circle.h\n | | |-- heading.h\n | | |-- nearest_point_on_spline.h\n | | |-- steering_angle.h\n | |-- lib\n | |-- libplanning.so\n |-- x86\n |-- Camera\n | |-- lane_detect\n | | |-- readme.txt\n | | |-- include\n | | | |-- LaneDetector.h\n | | | |-- LaneDraw.h\n | | | |-- lane_utils.h\n | | | |-- main_proc.h\n | | | |-- Matrix\n | | | | |-- 
LeastSquares.h\n | | | | |-- Matrix.h\n | | | |-- bean\n | | | | |-- BallotBox.h\n | | | | |-- BaseDefine.h\n | | | | |-- BranchLane.h\n | | | | |-- ComplexLaneBoundary.h\n | | | | |-- Lane.h\n | | | | |-- LaneArea.h\n | | | | |-- LaneDetectorTools.h\n | | | | |-- LaneMarker.h\n | | | | |-- LaneMarkerInComplexLaneBoundary.h\n | | | | |-- LaneMarkerLine.h\n | | | | |-- LaneMarkerLineSequence.h\n | | | | |-- LaneMarkerLineSequences.h\n | | | | |-- LaneMarkerLines.h\n | | | | |-- LaneMarkerPair.h\n | | | | |-- LaneMarkerPairs.h\n | | | | |-- LaneMarkerPoint.h\n | | | | |-- LaneMarkerPoints.h\n | | | | |-- LaneMarkers.h\n | | | | |-- LaneParameter.h\n | | | | |-- LaneParameterEstimator.h\n | | | | |-- LaneParameterOneSide.h\n | | | | |-- LaneRegion.h\n | | | | |-- LaneSide.h\n | | | |-- lane_lcm\n | | | | |-- image_info.hpp\n | | | | |-- ins_info.hpp\n | | | | |-- line_info.hpp\n | | | | |-- line_point.hpp\n | | | |-- sensor_lcm\n | | | | |-- cam_obj_list.hpp\n | | | | |-- cam_object.hpp\n | | | | |-- i_point.hpp\n | | | | |-- ibox_2d.hpp\n | | | | |-- lidar_obj_list.hpp\n | | | | |-- lidar_object.hpp\n | | | | |-- obstacle_alarm_report.hpp\n | | | | |-- v_point.hpp\n | | | | |-- vbox_2d.hpp\n | | | | |-- vbox_3d.hpp\n | | | | |-- w_point.hpp\n | | | | |-- wbox_2d.hpp\n | | | | |-- wbox_3d.hpp\n | | | |-- spline\n | | | | |-- spline.h\n | | | |-- utils\n | | | |-- GridMap1D.h\n | | | |-- Mconfig.h\n | | | |-- OutputInfo.h\n | | | |-- RefOffset.h\n | | | |-- colormisc.h\n | | | |-- config.h\n | | | |-- config2.h\n | | | |-- flexarray.h\n | | | |-- globalVal.h\n | | | |-- imrgb.h\n | | | |-- lm_type.h\n | | | |-- matutil-d.h\n | | | |-- my_resource.h\n | | | |-- roadimage_window.h\n | | | |-- tmc_stereobmp-forMono.h\n | | | |-- type.h\n | | |-- lib\n | | |-- liblane_detect.so\n | | |-- liblanedetect_lib.so\n | |-- vision_ssd_detect\n | |-- readme\n | |-- include\n | | |-- Config.h\n | | |-- camera_obj_list.hpp\n | | |-- distance_calculation.hpp\n | | |-- 
vision_detect_node.hpp\n | | |-- vision_detector.hpp\n | | |-- caffe\n | | | |-- blob.hpp\n | | | |-- caffe.hpp\n | | | |-- common.hpp\n | | | |-- data_reader.hpp\n | | | |-- data_transformer.hpp\n | | | |-- filler.hpp\n | | | |-- internal_thread.hpp\n | | | |-- layer.hpp\n | | | |-- layer_factory.hpp\n | | | |-- net.hpp\n | | | |-- parallel.hpp\n | | | |-- sgd_solvers.hpp\n | | | |-- solver.hpp\n | | | |-- solver_factory.hpp\n | | | |-- syncedmem.hpp\n | | | |-- layers\n | | | | |-- absval_layer.hpp\n | | | | |-- accuracy_layer.hpp\n | | | | |-- annotated_data_layer.hpp\n | | | | |-- argmax_layer.hpp\n | | | | |-- base_conv_layer.hpp\n | | | | |-- base_data_layer.hpp\n | | | | |-- batch_norm_layer.hpp\n | | | | |-- batch_reindex_layer.hpp\n | | | | |-- bias_layer.hpp\n | | | | |-- bnll_layer.hpp\n | | | | |-- concat_layer.hpp\n | | | | |-- contrastive_loss_layer.hpp\n | | | | |-- conv_layer.hpp\n | | | | |-- crop_layer.hpp\n | | | | |-- cudnn_conv_layer.hpp\n | | | | |-- cudnn_lcn_layer.hpp\n | | | | |-- cudnn_lrn_layer.hpp\n | | | | |-- cudnn_pooling_layer.hpp\n | | | | |-- cudnn_relu_layer.hpp\n | | | | |-- cudnn_sigmoid_layer.hpp\n | | | | |-- cudnn_softmax_layer.hpp\n | | | | |-- cudnn_tanh_layer.hpp\n | | | | |-- data_layer.hpp\n | | | | |-- deconv_layer.hpp\n | | | | |-- depthwise_conv_layer.hpp\n | | | | |-- detection_evaluate_layer.hpp\n | | | | |-- detection_output_layer.hpp\n | | | | |-- dropout_layer.hpp\n | | | | |-- dummy_data_layer.hpp\n | | | | |-- eltwise_layer.hpp\n | | | | |-- elu_layer.hpp\n | | | | |-- embed_layer.hpp\n | | | | |-- euclidean_loss_layer.hpp\n | | | | |-- exp_layer.hpp\n | | | | |-- filter_layer.hpp\n | | | | |-- flatten_layer.hpp\n | | | | |-- hdf5_data_layer.hpp\n | | | | |-- hdf5_output_layer.hpp\n | | | | |-- hinge_loss_layer.hpp\n | | | | |-- im2col_layer.hpp\n | | | | |-- image_data_layer.hpp\n | | | | |-- infogain_loss_layer.hpp\n | | | | |-- inner_product_layer.hpp\n | | | | |-- input_layer.hpp\n | | | | |-- 
log_layer.hpp\n | | | | |-- loss_layer.hpp\n | | | | |-- lrn_layer.hpp\n | | | | |-- lstm_layer.hpp\n | | | | |-- memory_data_layer.hpp\n | | | | |-- multibox_loss_layer.hpp\n | | | | |-- multinomial_logistic_loss_layer.hpp\n | | | | |-- mvn_layer.hpp\n | | | | |-- neuron_layer.hpp\n | | | | |-- normalize_layer.hpp\n | | | | |-- parameter_layer.hpp\n | | | | |-- permute_layer.hpp\n | | | | |-- pooling_layer.hpp\n | | | | |-- power_layer.hpp\n | | | | |-- prelu_layer.hpp\n | | | | |-- prior_box_layer.hpp\n | | | | |-- python_layer.hpp\n | | | | |-- recurrent_layer.hpp\n | | | | |-- reduction_layer.hpp\n | | | | |-- relu_layer.hpp\n | | | | |-- reshape_layer.hpp\n | | | | |-- rnn_layer.hpp\n | | | | |-- scale_layer.hpp\n | | | | |-- sigmoid_cross_entropy_loss_layer.hpp\n | | | | |-- sigmoid_layer.hpp\n | | | | |-- silence_layer.hpp\n | | | | |-- slice_layer.hpp\n | | | | |-- smooth_L1_loss_layer.hpp\n | | | | |-- softmax_layer.hpp\n | | | | |-- softmax_loss_layer.hpp\n | | | | |-- split_layer.hpp\n | | | | |-- spp_layer.hpp\n | | | | |-- tanh_layer.hpp\n | | | | |-- threshold_layer.hpp\n | | | | |-- tile_layer.hpp\n | | | | |-- video_data_layer.hpp\n | | | | |-- window_data_layer.hpp\n | | | |-- test\n | | | | |-- test_caffe_main.hpp\n | | | | |-- test_gradient_check_util.hpp\n | | | |-- util\n | | | |-- bbox_util.hpp\n | | | |-- benchmark.hpp\n | | | |-- blocking_queue.hpp\n | | | |-- cudnn.hpp\n | | | |-- db.hpp\n | | | |-- db_leveldb.hpp\n | | | |-- db_lmdb.hpp\n | | | |-- device_alternate.hpp\n | | | |-- format.hpp\n | | | |-- gpu_util.cuh\n | | | |-- hdf5.hpp\n | | | |-- im2col.hpp\n | | | |-- im_transforms.hpp\n | | | |-- insert_splits.hpp\n | | | |-- io.hpp\n | | | |-- math_functions.hpp\n | | | |-- mkl_alternate.hpp\n | | | |-- rng.hpp\n | | | |-- sampler.hpp\n | | | |-- signal_handler.h\n | | | |-- upgrade_proto.hpp\n | | |-- ssd_detection\n | | |-- Config.h\n | | |-- camera_obj_list.hpp\n | | |-- distance_calculation.hpp\n | | |-- kf_tracker.hpp\n | | |-- 
vision_detect_node.hpp\n | | |-- vision_detector.hpp\n | |-- kalman\n | | |-- kalmanfilter.cpp\n | | |-- kalmanfilter.h\n | | |-- math_util.h\n | | |-- matrix.cpp\n | | |-- matrix.h\n | |-- lib\n | | |-- libcaffe.a\n | | |-- libcaffe.so\n | | |-- libvision_ssd_detect.so\n | |-- util\n | |-- Affinity.cpp\n | |-- Affinity.h\n | |-- BoundingBox.cpp\n | |-- BoundingBox.h\n |-- Common\n | |-- readme\n | |-- include\n | | |-- base\n | | | |-- nad_base.h\n | | | |-- nad_enum.h\n | | | |-- nad_function.h\n | | | |-- nad_retcode.h\n | | | |-- nad_type.h\n | | | |-- config\n | | | | |-- Config.h\n | | | | |-- nad_config.h\n | | | | |-- route_config.h\n | | | |-- db\n | | | | |-- nad_db.h\n | | | |-- log\n | | | | |-- nad_glog.h\n | | | |-- xml\n | | | |-- pugiconfig.hpp\n | | | |-- pugixml.hpp\n | | |-- distributed_runtime\n | | | |-- info\n | | | | |-- nad_info.h\n | | | | |-- nad_speed.h\n | | | |-- session\n | | | | |-- nad_session.h\n | | | |-- starter\n | | | | |-- nad_starter.h\n | | | |-- timer\n | | | |-- nad_timer.h\n | | |-- oam\n | | | |-- task\n | | | |-- nad_task_func.h\n | | | |-- nad_task_userfunc.h\n | | |-- route\n | | |-- LocalGeographicCS.hpp\n | | |-- convert_coordinates.hpp\n | | |-- heading.h\n | | |-- math_util.h\n | |-- lib\n | |-- libcommon.so\n |-- Control\n | |-- include\n | | |-- chassis.h\n | | |-- controller.h\n | | |-- controller_agent.h\n | | |-- controller_alarm_code.h\n | | |-- controller_config.h\n | | |-- controller_output.h\n | | |-- controller_output_alarm.h\n | | |-- controller_output_alarm_code.h\n | | |-- debug_output.h\n | | |-- gear_position.h\n | | |-- generic_controller.h\n | | |-- local_localization.h\n | | |-- localization.h\n | | |-- localization_.h\n | | |-- nav_points.h\n | | |-- navi_point.h\n | | |-- scheduler.h\n | | |-- script.sh\n | | |-- trajectory.h\n | | |-- common\n | | | |-- LocalGeographicCS.hpp\n | | | |-- cputime.h\n | | | |-- interpolation_1d.h\n | | | |-- interpolation_2d.h\n | | | |-- kalman_filter.h\n | | | 
|-- kalman_filter_app.h\n | | | |-- math_util.h\n | | | |-- navi_point.h\n | | | |-- path.h\n | | | |-- eigen3\n | | | | |-- signature_of_eigen3_matrix_library\n | | | | |-- Eigen\n | | | | | |-- Cholesky\n | | | | | |-- CholmodSupport\n | | | | | |-- Core\n | | | | | |-- Dense\n | | | | | |-- Eigen\n | | | | | |-- Eigenvalues\n | | | | | |-- Geometry\n | | | | | |-- Householder\n | | | | | |-- IterativeLinearSolvers\n | | | | | |-- Jacobi\n | | | | | |-- LU\n | | | | | |-- MetisSupport\n | | | | | |-- OrderingMethods\n | | | | | |-- PaStiXSupport\n | | | | | |-- PardisoSupport\n | | | | | |-- QR\n | | | | | |-- QtAlignedMalloc\n | | | | | |-- SPQRSupport\n | | | | | |-- SVD\n | | | | | |-- Sparse\n | | | | | |-- SparseCholesky\n | | | | | |-- SparseCore\n | | | | | |-- SparseLU\n | | | | | |-- SparseQR\n | | | | | |-- StdDeque\n | | | | | |-- StdList\n | | | | | |-- StdVector\n | | | | | |-- SuperLUSupport\n | | | | | |-- UmfPackSupport\n | | | | | |-- src\n | | | | | |-- Cholesky\n | | | | | | |-- LDLT.h\n | | | | | | |-- LLT.h\n | | | | | | |-- LLT_MKL.h\n | | | | | |-- CholmodSupport\n | | | | | | |-- CholmodSupport.h\n | | | | | |-- Core\n | | | | | | |-- Array.h\n | | | | | | |-- ArrayBase.h\n | | | | | | |-- ArrayWrapper.h\n | | | | | | |-- Assign.h\n | | | | | | |-- AssignEvaluator.h\n | | | | | | |-- Assign_MKL.h\n | | | | | | |-- BandMatrix.h\n | | | | | | |-- Block.h\n | | | | | | |-- BooleanRedux.h\n | | | | | | |-- CommaInitializer.h\n | | | | | | |-- CoreEvaluators.h\n | | | | | | |-- CoreIterators.h\n | | | | | | |-- CwiseBinaryOp.h\n | | | | | | |-- CwiseNullaryOp.h\n | | | | | | |-- CwiseUnaryOp.h\n | | | | | | |-- CwiseUnaryView.h\n | | | | | | |-- DenseBase.h\n | | | | | | |-- DenseCoeffsBase.h\n | | | | | | |-- DenseStorage.h\n | | | | | | |-- Diagonal.h\n | | | | | | |-- DiagonalMatrix.h\n | | | | | | |-- DiagonalProduct.h\n | | | | | | |-- Dot.h\n | | | | | | |-- EigenBase.h\n | | | | | | |-- ForceAlignedAccess.h\n | | | | | | |-- Fuzzy.h\n | 
| | | | | |-- GeneralProduct.h\n | | | | | | |-- GenericPacketMath.h\n | | | | | | |-- GlobalFunctions.h\n | | | | | | |-- IO.h\n | | | | | | |-- Inverse.h\n | | | | | | |-- Map.h\n | | | | | | |-- MapBase.h\n | | | | | | |-- MathFunctions.h\n | | | | | | |-- Matrix.h\n | | | | | | |-- MatrixBase.h\n | | | | | | |-- NestByValue.h\n | | | | | | |-- NoAlias.h\n | | | | | | |-- NumTraits.h\n | | | | | | |-- PermutationMatrix.h\n | | | | | | |-- PlainObjectBase.h\n | | | | | | |-- Product.h\n | | | | | | |-- ProductEvaluators.h\n | | | | | | |-- Random.h\n | | | | | | |-- Redux.h\n | | | | | | |-- Ref.h\n | | | | | | |-- Replicate.h\n | | | | | | |-- ReturnByValue.h\n | | | | | | |-- Reverse.h\n | | | | | | |-- Select.h\n | | | | | | |-- SelfAdjointView.h\n | | | | | | |-- SelfCwiseBinaryOp.h\n | | | | | | |-- Solve.h\n | | | | | | |-- SolveTriangular.h\n | | | | | | |-- SolverBase.h\n | | | | | | |-- SpecialFunctions.h\n | | | | | | |-- StableNorm.h\n | | | | | | |-- Stride.h\n | | | | | | |-- Swap.h\n | | | | | | |-- Transpose.h\n | | | | | | |-- Transpositions.h\n | | | | | | |-- TriangularMatrix.h\n | | | | | | |-- VectorBlock.h\n | | | | | | |-- VectorwiseOp.h\n | | | | | | |-- Visitor.h\n | | | | | | |-- arch\n | | | | | | | |-- AVX\n | | | | | | | | |-- Complex.h\n | | | | | | | | |-- MathFunctions.h\n | | | | | | | | |-- PacketMath.h\n | | | | | | | | |-- TypeCasting.h\n | | | | | | | |-- AltiVec\n | | | | | | | | |-- Complex.h\n | | | | | | | | |-- MathFunctions.h\n | | | | | | | | |-- PacketMath.h\n | | | | | | | |-- CUDA\n | | | | | | | | |-- MathFunctions.h\n | | | | | | | | |-- PacketMath.h\n | | | | | | | |-- Default\n | | | | | | | | |-- Settings.h\n | | | | | | | |-- NEON\n | | | | | | | | |-- Complex.h\n | | | | | | | | |-- MathFunctions.h\n | | | | | | | | |-- PacketMath.h\n | | | | | | | |-- SSE\n | | | | | | | |-- Complex.h\n | | | | | | | |-- MathFunctions.h\n | | | | | | | |-- PacketMath.h\n | | | | | | | |-- TypeCasting.h\n | | | | | | |-- 
functors\n | | | | | | | |-- AssignmentFunctors.h\n | | | | | | | |-- BinaryFunctors.h\n | | | | | | | |-- NullaryFunctors.h\n | | | | | | | |-- StlFunctors.h\n | | | | | | | |-- UnaryFunctors.h\n | | | | | | |-- products\n | | | | | | | |-- GeneralBlockPanelKernel.h\n | | | | | | | |-- GeneralMatrixMatrix.h\n | | | | | | | |-- GeneralMatrixMatrixTriangular.h\n | | | | | | | |-- GeneralMatrixMatrixTriangular_MKL.h\n | | | | | | | |-- GeneralMatrixMatrix_MKL.h\n | | | | | | | |-- GeneralMatrixVector.h\n | | | | | | | |-- GeneralMatrixVector_MKL.h\n | | | | | | | |-- Parallelizer.h\n | | | | | | | |-- SelfadjointMatrixMatrix.h\n | | | | | | | |-- SelfadjointMatrixMatrix_MKL.h\n | | | | | | | |-- SelfadjointMatrixVector.h\n | | | | | | | |-- SelfadjointMatrixVector_MKL.h\n | | | | | | | |-- SelfadjointProduct.h\n | | | | | | | |-- SelfadjointRank2Update.h\n | | | | | | | |-- TriangularMatrixMatrix.h\n | | | | | | | |-- TriangularMatrixMatrix_MKL.h\n | | | | | | | |-- TriangularMatrixVector.h\n | | | | | | | |-- TriangularMatrixVector_MKL.h\n | | | | | | | |-- TriangularSolverMatrix.h\n | | | | | | | |-- TriangularSolverMatrix_MKL.h\n | | | | | | | |-- TriangularSolverVector.h\n | | | | | | |-- util\n | | | | | | |-- BlasUtil.h\n | | | | | | |-- Constants.h\n | | | | | | |-- DisableStupidWarnings.h\n | | | | | | |-- ForwardDeclarations.h\n | | | | | | |-- MKL_support.h\n | | | | | | |-- Macros.h\n | | | | | | |-- Memory.h\n | | | | | | |-- Meta.h\n | | | | | | |-- NonMPL2.h\n | | | | | | |-- ReenableStupidWarnings.h\n | | | | | | |-- StaticAssert.h\n | | | | | | |-- XprHelper.h\n | | | | | |-- Eigenvalues\n | | | | | | |-- ComplexEigenSolver.h\n | | | | | | |-- ComplexSchur.h\n | | | | | | |-- ComplexSchur_MKL.h\n | | | | | | |-- EigenSolver.h\n | | | | | | |-- GeneralizedEigenSolver.h\n | | | | | | |-- GeneralizedSelfAdjointEigenSolver.h\n | | | | | | |-- HessenbergDecomposition.h\n | | | | | | |-- MatrixBaseEigenvalues.h\n | | | | | | |-- RealQZ.h\n | | | | | | |-- 
RealSchur.h\n | | | | | | |-- RealSchur_MKL.h\n | | | | | | |-- SelfAdjointEigenSolver.h\n | | | | | | |-- SelfAdjointEigenSolver_MKL.h\n | | | | | | |-- Tridiagonalization.h\n | | | | | |-- Geometry\n | | | | | | |-- AlignedBox.h\n | | | | | | |-- AngleAxis.h\n | | | | | | |-- EulerAngles.h\n | | | | | | |-- Homogeneous.h\n | | | | | | |-- Hyperplane.h\n | | | | | | |-- OrthoMethods.h\n | | | | | | |-- ParametrizedLine.h\n | | | | | | |-- Quaternion.h\n | | | | | | |-- Rotation2D.h\n | | | | | | |-- RotationBase.h\n | | | | | | |-- Scaling.h\n | | | | | | |-- Transform.h\n | | | | | | |-- Translation.h\n | | | | | | |-- Umeyama.h\n | | | | | | |-- arch\n | | | | | | |-- Geometry_SSE.h\n | | | | | |-- Householder\n | | | | | | |-- BlockHouseholder.h\n | | | | | | |-- Householder.h\n | | | | | | |-- HouseholderSequence.h\n | | | | | |-- IterativeLinearSolvers\n | | | | | | |-- BasicPreconditioners.h\n | | | | | | |-- BiCGSTAB.h\n | | | | | | |-- ConjugateGradient.h\n | | | | | | |-- IncompleteCholesky.h\n | | | | | | |-- IncompleteLUT.h\n | | | | | | |-- IterativeSolverBase.h\n | | | | | | |-- LeastSquareConjugateGradient.h\n | | | | | | |-- SolveWithGuess.h\n | | | | | |-- Jacobi\n | | | | | | |-- Jacobi.h\n | | | | | |-- LU\n | | | | | | |-- Determinant.h\n | | | | | | |-- FullPivLU.h\n | | | | | | |-- InverseImpl.h\n | | | | | | |-- PartialPivLU.h\n | | | | | | |-- PartialPivLU_MKL.h\n | | | | | | |-- arch\n | | | | | | |-- Inverse_SSE.h\n | | | | | |-- MetisSupport\n | | | | | | |-- MetisSupport.h\n | | | | | |-- OrderingMethods\n | | | | | | |-- Amd.h\n | | | | | | |-- Eigen_Colamd.h\n | | | | | | |-- Ordering.h\n | | | | | |-- PaStiXSupport\n | | | | | | |-- PaStiXSupport.h\n | | | | | |-- PardisoSupport\n | | | | | | |-- PardisoSupport.h\n | | | | | |-- QR\n | | | | | | |-- ColPivHouseholderQR.h\n | | | | | | |-- ColPivHouseholderQR_MKL.h\n | | | | | | |-- FullPivHouseholderQR.h\n | | | | | | |-- HouseholderQR.h\n | | | | | | |-- HouseholderQR_MKL.h\n | | | | 
| |-- SPQRSupport\n | | | | | | |-- SuiteSparseQRSupport.h\n | | | | | |-- SVD\n | | | | | | |-- BDCSVD.h\n | | | | | | |-- JacobiSVD.h\n | | | | | | |-- JacobiSVD_MKL.h\n | | | | | | |-- SVDBase.h\n | | | | | | |-- UpperBidiagonalization.h\n | | | | | |-- SparseCholesky\n | | | | | | |-- SimplicialCholesky.h\n | | | | | | |-- SimplicialCholesky_impl.h\n | | | | | |-- SparseCore\n | | | | | | |-- AmbiVector.h\n | | | | | | |-- CompressedStorage.h\n | | | | | | |-- ConservativeSparseSparseProduct.h\n | | | | | | |-- MappedSparseMatrix.h\n | | | | | | |-- SparseAssign.h\n | | | | | | |-- SparseBlock.h\n | | | | | | |-- SparseColEtree.h\n | | | | | | |-- SparseCompressedBase.h\n | | | | | | |-- SparseCwiseBinaryOp.h\n | | | | | | |-- SparseCwiseUnaryOp.h\n | | | | | | |-- SparseDenseProduct.h\n | | | | | | |-- SparseDiagonalProduct.h\n | | | | | | |-- SparseDot.h\n | | | | | | |-- SparseFuzzy.h\n | | | | | | |-- SparseMap.h\n | | | | | | |-- SparseMatrix.h\n | | | | | | |-- SparseMatrixBase.h\n | | | | | | |-- SparsePermutation.h\n | | | | | | |-- SparseProduct.h\n | | | | | | |-- SparseRedux.h\n | | | | | | |-- SparseRef.h\n | | | | | | |-- SparseSelfAdjointView.h\n | | | | | | |-- SparseSolverBase.h\n | | | | | | |-- SparseSparseProductWithPruning.h\n | | | | | | |-- SparseTranspose.h\n | | | | | | |-- SparseTriangularView.h\n | | | | | | |-- SparseUtil.h\n | | | | | | |-- SparseVector.h\n | | | | | | |-- SparseView.h\n | | | | | | |-- TriangularSolver.h\n | | | | | |-- SparseLU\n | | | | | | |-- SparseLU.h\n | | | | | | |-- SparseLUImpl.h\n | | | | | | |-- SparseLU_Memory.h\n | | | | | | |-- SparseLU_Structs.h\n | | | | | | |-- SparseLU_SupernodalMatrix.h\n | | | | | | |-- SparseLU_Utils.h\n | | | | | | |-- SparseLU_column_bmod.h\n | | | | | | |-- SparseLU_column_dfs.h\n | | | | | | |-- SparseLU_copy_to_ucol.h\n | | | | | | |-- SparseLU_gemm_kernel.h\n | | | | | | |-- SparseLU_heap_relax_snode.h\n | | | | | | |-- SparseLU_kernel_bmod.h\n | | | | | | |-- 
SparseLU_panel_bmod.h\n | | | | | | |-- SparseLU_panel_dfs.h\n | | | | | | |-- SparseLU_pivotL.h\n | | | | | | |-- SparseLU_pruneL.h\n | | | | | | |-- SparseLU_relax_snode.h\n | | | | | |-- SparseQR\n | | | | | | |-- SparseQR.h\n | | | | | |-- StlSupport\n | | | | | | |-- StdDeque.h\n | | | | | | |-- StdList.h\n | | | | | | |-- StdVector.h\n | | | | | | |-- details.h\n | | | | | |-- SuperLUSupport\n | | | | | | |-- SuperLUSupport.h\n | | | | | |-- UmfPackSupport\n | | | | | | |-- UmfPackSupport.h\n | | | | | |-- misc\n | | | | | | |-- Image.h\n | | | | | | |-- Kernel.h\n | | | | | | |-- blas.h\n | | | | | |-- plugins\n | | | | | |-- ArrayCwiseBinaryOps.h\n | | | | | |-- ArrayCwiseUnaryOps.h\n | | | | | |-- BlockMethods.h\n | | | | | |-- CommonCwiseBinaryOps.h\n | | | | | |-- CommonCwiseUnaryOps.h\n | | | | | |-- MatrixCwiseBinaryOps.h\n | | | | | |-- MatrixCwiseUnaryOps.h\n | | | | |-- unsupported\n | | | | |-- Eigen\n | | | | |-- AdolcForward\n | | | | |-- AlignedVector3\n | | | | |-- ArpackSupport\n | | | | |-- AutoDiff\n | | | | |-- BVH\n | | | | |-- FFT\n | | | | |-- IterativeSolvers\n | | | | |-- KroneckerProduct\n | | | | |-- LevenbergMarquardt\n | | | | |-- MPRealSupport\n | | | | |-- MatrixFunctions\n | | | | |-- MoreVectorization\n | | | | |-- NonLinearOptimization\n | | | | |-- NumericalDiff\n | | | | |-- OpenGLSupport\n | | | | |-- Polynomials\n | | | | |-- Skyline\n | | | | |-- SparseExtra\n | | | | |-- Splines\n | | | | |-- CXX11\n | | | | | |-- Core\n | | | | | |-- Tensor\n | | | | | |-- TensorSymmetry\n | | | | | |-- src\n | | | | | |-- Core\n | | | | | | |-- util\n | | | | | | |-- CXX11Meta.h\n | | | | | | |-- CXX11Workarounds.h\n | | | | | | |-- EmulateArray.h\n | | | | | | |-- EmulateCXX11Meta.h\n | | | | | |-- Tensor\n | | | | | | |-- Tensor.h\n | | | | | | |-- TensorArgMax.h\n | | | | | | |-- TensorAssign.h\n | | | | | | |-- TensorBase.h\n | | | | | | |-- TensorBroadcasting.h\n | | | | | | |-- TensorChipping.h\n | | | | | | |-- 
TensorConcatenation.h\n | | | | | | |-- TensorContraction.h\n | | | | | | |-- TensorContractionCuda.h\n | | | | | | |-- TensorContractionThreadPool.h\n | | | | | | |-- TensorConversion.h\n | | | | | | |-- TensorConvolution.h\n | | | | | | |-- TensorCustomOp.h\n | | | | | | |-- TensorDevice.h\n | | | | | | |-- TensorDeviceCuda.h\n | | | | | | |-- TensorDeviceDefault.h\n | | | | | | |-- TensorDeviceThreadPool.h\n | | | | | | |-- TensorDimensionList.h\n | | | | | | |-- TensorDimensions.h\n | | | | | | |-- TensorEvalTo.h\n | | | | | | |-- TensorEvaluator.h\n | | | | | | |-- TensorExecutor.h\n | | | | | | |-- TensorExpr.h\n | | | | | | |-- TensorFFT.h\n | | | | | | |-- TensorFixedSize.h\n | | | | | | |-- TensorForcedEval.h\n | | | | | | |-- TensorForwardDeclarations.h\n | | | | | | |-- TensorFunctors.h\n | | | | | | |-- TensorGenerator.h\n | | | | | | |-- TensorIO.h\n | | | | | | |-- TensorImagePatch.h\n | | | | | | |-- TensorIndexList.h\n | | | | | | |-- TensorInflation.h\n | | | | | | |-- TensorInitializer.h\n | | | | | | |-- TensorIntDiv.h\n | | | | | | |-- TensorLayoutSwap.h\n | | | | | | |-- TensorMacros.h\n | | | | | | |-- TensorMap.h\n | | | | | | |-- TensorMeta.h\n | | | | | | |-- TensorMorphing.h\n | | | | | | |-- TensorPadding.h\n | | | | | | |-- TensorPatch.h\n | | | | | | |-- TensorReduction.h\n | | | | | | |-- TensorReductionCuda.h\n | | | | | | |-- TensorRef.h\n | | | | | | |-- TensorReverse.h\n | | | | | | |-- TensorShuffling.h\n | | | | | | |-- TensorStorage.h\n | | | | | | |-- TensorStriding.h\n | | | | | | |-- TensorTraits.h\n | | | | | | |-- TensorUInt128.h\n | | | | | | |-- TensorVolumePatch.h\n | | | | | |-- TensorSymmetry\n | | | | | |-- DynamicSymmetry.h\n | | | | | |-- StaticSymmetry.h\n | | | | | |-- Symmetry.h\n | | | | | |-- util\n | | | | | |-- TemplateGroupTheory.h\n | | | | |-- src\n | | | | |-- AutoDiff\n | | | | | |-- AutoDiffJacobian.h\n | | | | | |-- AutoDiffScalar.h\n | | | | | |-- AutoDiffVector.h\n | | | | |-- BVH\n | | | | | |-- 
BVAlgorithms.h\n | | | | | |-- KdBVH.h\n | | | | |-- Eigenvalues\n | | | | | |-- ArpackSelfAdjointEigenSolver.h\n | | | | |-- FFT\n | | | | | |-- ei_fftw_impl.h\n | | | | | |-- ei_kissfft_impl.h\n | | | | |-- IterativeSolvers\n | | | | | |-- ConstrainedConjGrad.h\n | | | | | |-- DGMRES.h\n | | | | | |-- GMRES.h\n | | | | | |-- IncompleteLU.h\n | | | | | |-- IterationController.h\n | | | | | |-- MINRES.h\n | | | | | |-- Scaling.h\n | | | | |-- KroneckerProduct\n | | | | | |-- KroneckerTensorProduct.h\n | | | | |-- LevenbergMarquardt\n | | | | | |-- LMcovar.h\n | | | | | |-- LMonestep.h\n | | | | | |-- LMpar.h\n | | | | | |-- LMqrsolv.h\n | | | | | |-- LevenbergMarquardt.h\n | | | | |-- MatrixFunctions\n | | | | | |-- MatrixExponential.h\n | | | | | |-- MatrixFunction.h\n | | | | | |-- MatrixLogarithm.h\n | | | | | |-- MatrixPower.h\n | | | | | |-- MatrixSquareRoot.h\n | | | | | |-- StemFunction.h\n | | | | |-- MoreVectorization\n | | | | | |-- MathFunctions.h\n | | | | |-- NonLinearOptimization\n | | | | | |-- HybridNonLinearSolver.h\n | | | | | |-- LevenbergMarquardt.h\n | | | | | |-- chkder.h\n | | | | | |-- covar.h\n | | | | | |-- dogleg.h\n | | | | | |-- fdjac1.h\n | | | | | |-- lmpar.h\n | | | | | |-- qrsolv.h\n | | | | | |-- r1mpyq.h\n | | | | | |-- r1updt.h\n | | | | | |-- rwupdt.h\n | | | | |-- NumericalDiff\n | | | | | |-- NumericalDiff.h\n | | | | |-- Polynomials\n | | | | | |-- Companion.h\n | | | | | |-- PolynomialSolver.h\n | | | | | |-- PolynomialUtils.h\n | | | | |-- Skyline\n | | | | | |-- SkylineInplaceLU.h\n | | | | | |-- SkylineMatrix.h\n | | | | | |-- SkylineMatrixBase.h\n | | | | | |-- SkylineProduct.h\n | | | | | |-- SkylineStorage.h\n | | | | | |-- SkylineUtil.h\n | | | | |-- SparseExtra\n | | | | | |-- BlockOfDynamicSparseMatrix.h\n | | | | | |-- BlockSparseMatrix.h\n | | | | | |-- DynamicSparseMatrix.h\n | | | | | |-- MarketIO.h\n | | | | | |-- MatrixMarketIterator.h\n | | | | | |-- RandomSetter.h\n | | | | |-- Splines\n | | | | |-- 
Spline.h\n | | | | |-- SplineFitting.h\n | | | | |-- SplineFwd.h\n | | | |-- filters\n | | | | |-- digital_filter.cc\n | | | | |-- digital_filter.h\n | | | | |-- digital_filter_coefficients.cc\n | | | | |-- digital_filter_coefficients.h\n | | | | |-- mean_filter.cc\n | | | | |-- mean_filter.h\n | | | |-- map_matching\n | | | | |-- LocalGeographicCS.hpp\n | | | | |-- circle.h\n | | | | |-- convert_coordinates.hpp\n | | | | |-- coordinate_transformation.h\n | | | | |-- cs.h\n | | | | |-- heading.h\n | | | | |-- localization_.h\n | | | | |-- map_matching.h\n | | | | |-- navi_point.h\n | | | | |-- point.h\n | | | | |-- spline.h\n | | | | |-- steering_angle.h\n | | | |-- math\n | | | |-- linear_quadratic_regulator.cc\n | | | |-- linear_quadratic_regulator.h\n | | | |-- math_utils.cc\n | | | |-- math_utils.h\n | | | |-- vec2d.cc\n | | | |-- vec2d.h\n | | |-- lat_controller\n | | | |-- lat_controller.h\n | | |-- lon_controller\n | | | |-- lon_controller.h\n | | | |-- vehicle_dynamics.h\n | | |-- lqr_controller\n | | | |-- lqr_lat_controller.h\n | | | |-- simple_lateral_debug.h\n | | |-- pid\n | | |-- pid_controller.h\n | |-- lib\n | |-- libcontroller.so\n |-- Map\n | |-- readme\n | |-- include\n | | |-- Attribute.hpp\n | | |-- BoundingBox.hpp\n | | |-- CompoundLanelet.hpp\n | | |-- LLTree.hpp\n | | |-- Lanelet.hpp\n | | |-- LaneletBase.hpp\n | | |-- LaneletFwd.hpp\n | | |-- LaneletGraph.hpp\n | | |-- LaneletMap.hpp\n | | |-- LineStrip.hpp\n | | |-- MapData.h\n | | |-- MapInterface.h\n | | |-- RTree.h\n | | |-- RegulatoryElement.hpp\n | | |-- RoadMap.h\n | | |-- lanelet_point.hpp\n | | |-- llet_xml.hpp\n | | |-- mercator.hpp\n | | |-- normalize_angle.hpp\n | | |-- prettyprint.hpp\n | | |-- regulator.h\n | |-- lib\n | |-- libroad_map.so\n |-- Navi\n | |-- readme\n | |-- include\n | | |-- route.h\n | | |-- route_data.h\n | |-- lib\n | |-- libroute.so\n |-- Planning\n |-- include\n | |-- collision_check\n | | |-- collision_check.h\n | |-- common\n | | |-- 
LocalGeographicCS.hpp\n | | |-- car_state.h\n | | |-- color_util.h\n | | |-- convert_coordinates.hpp\n | | |-- cs.h\n | | |-- enum_list.h\n | | |-- math_util.h\n | | |-- navi_point.h\n | | |-- path.h\n | | |-- path_tools.h\n | | |-- point.h\n | | |-- rect.h\n | |-- map_matching\n | | |-- map_matching.h\n | |-- park\n | | |-- park.h\n | |-- planning\n | | |-- planning.h\n | | |-- planning_output.h\n | | |-- planning_param.h\n | | |-- route_data.h\n | |-- spline\n | | |-- math_tools.h\n | | |-- quartic_spline.h\n | | |-- quintic_spline.h\n | | |-- spline.h\n | |-- trajectory\n | | |-- trajectory.h\n | | |-- trajectory_sets.h\n | |-- vehicle_dynamic\n | |-- cau_heading_steering.h\n | |-- circle.h\n | |-- heading.h\n | |-- nearest_point_on_spline.h\n | |-- steering_angle.h\n |-- lib\n |-- libparking.so\n |-- libplanning.so\n |-- libquartic_spline.so\n |-- libquintic_spline.so\n"
},
{
"alpha_fraction": 0.6600614190101624,
"alphanum_fraction": 0.6614568829536438,
"avg_line_length": 30.991071701049805,
"blob_id": "8694deb1a4db55535cb2ae4cbb86d1b1fbea636e",
"content_id": "263144e3d058abe8f89ed6d233e9f4dbed163392",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3583,
"license_type": "no_license",
"max_line_length": 167,
"num_lines": 112,
"path": "/athena/core/x86/Camera/lane_detect/include/LaneDetector.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef\t_LaneDetector_H_\n#define\t_LaneDetector_H_\n\n#include \"utils/config.h\"\n#include \"utils/config2.h\"\t// configuration file\n#include \"utils/tmc_stereobmp-forMono.h\"\n#include \"bean/LaneParameterEstimator.h\"\n#include \"bean/LaneArea.h\"\n#include \"utils/imrgb.h\"\n#include \"utils/my_resource.h\"\n#include \"utils/roadimage_window.h\"\n\ndouble getSelfVelocity(void);\nvoid setSelfVelocity(double dV);\n\nclass LaneDetector\n{\nprivate:\n//\tint *_piProcLine;\n//\tUchar *_pucProcLineImage;\n PARAM_CAM *_pCamParam;\n int _aiProcLineNumber[NF_NUM];\n LaneArea _pLaneArea;\n\n LaneMarkerPoints *_pUpEdgePoints;\n LaneMarkerPoints *_pDownEdgePoints;\n Disparity *_pDisparity;\n\npublic:\n inline LaneDetector(void)\n {\n _aiProcLineNumber[NF_NEAR] = 30;\n _aiProcLineNumber[NF_FAR] = 20;\n _pUpEdgePoints = new LaneMarkerPoints;\n _pDownEdgePoints = new LaneMarkerPoints;\n _pDisparity = NULL;\n _pCamParam = NULL;\n }\n inline ~LaneDetector(void)\n {\n SAFE_DELETE(_pUpEdgePoints);\n SAFE_DELETE(_pDownEdgePoints)\n _pCamParam = NULL;\n SAFE_DELETE(_pDisparity);\n SAFE_DELETE(_pCamParam);\n }\n inline LaneArea *Area(void)\n {\n return &_pLaneArea;\n }\n BOOL initialize(void);\n BOOL initialize(int iRegionHeight);\n BOOL initialize(PARAM_CAM *pCamParam);\n BOOL initializeWithAVIFileName(char *pcAVIFileName);\n BOOL detect(Uchar *pInputImage);\n//\tBOOL detect(IplImage *pInputImage);\n//\tBOOL detect(IplImage *pInputImage, double dVelocity);\n BOOL detect(IplImage *pInputImage, double dVelocity, Disparity *pDisparity);\n\n BOOL sortFlexArrayDouble(FlexArray<double> *pfa);\n BOOL sortLaneMarkerLinesByOffset(LaneMarkerLines *pLMLs);\n BOOL checkMismatchOfLaneMarkerPointsFromLaneParameter(PARAM_CAM *pCamParam, LaneParameterOneSide *pLPOneSide, LaneMarkerPoints *pLMPs, LaneMarkerPoints *pNewLMPs);\n BOOL getMinMax(LaneMarkerPoints *pLMPs, double *pdXmin, double *pdXmax, double *pdYmin, double *pdYmax, double *pdZmin, double *pdZmax);\n BOOL 
getYMin(LaneMarkerPoints *pLMPs, double *pdYmin);\n LaneMarkerPoints *mergeLaneMarkerPointsHead(LaneMarkerPoints *pBaseLMPs,LaneMarkerPoints *pLMPs);\n LaneMarkerPoints *mergeLaneMarkerPointsTail(LaneMarkerPoints *pBaseLMPs,LaneMarkerPoints *pLMPs);\n\n inline PARAM_CAM *CameraParam(void)\n {\n return _pCamParam;\n }\n inline int &ProcLineNumber(int iIdx)\n {\n return _aiProcLineNumber[iIdx];\n }\n//\tvoid searchTopsInBallotBox(void);\n inline LaneMarkerPoints *getUpEdgePoints(void)\n {\n return _pUpEdgePoints;\n }\n inline LaneMarkerPoints *getDownEdgePoints(void)\n {\n return _pDownEdgePoints;\n }\n inline void setCamParam(PARAM_CAM *p)\n {\n if(p == NULL)\treturn;\n SAFE_DELETE(_pCamParam);\n _pCamParam = new PARAM_CAM;\n if(_pCamParam == NULL)\treturn;\n (*_pCamParam) = *p;\n }\n inline PARAM_CAM *getCamParam(void)\n {\n return _pCamParam;\n }\n inline Disparity *getDisparity(void)\n {\n return _pDisparity;\n }\n inline void setDisparity(Disparity *p)\n {\n SAFE_DELETE(_pDisparity);\n _pDisparity = p;\n }\n};\nextern BOOL getMinMax(LaneMarkerPoints *pLMPs, double *pdXmin, double *pdXmax, double *pdYmin, double *pdYmax, double *pdZmin, double *pdZmax);\nextern BOOL getYMin(LaneMarkerPoints *pLMPs, double *pdYmin);\n\n//////////////////////////////////////////////////////////////////////////\n//////////////////////////////////////////////////////////////////////////\n#endif\t_LaneDetector_H_\n"
},
{
"alpha_fraction": 0.5717284679412842,
"alphanum_fraction": 0.5745276212692261,
"avg_line_length": 16.216867446899414,
"blob_id": "ae4800c13ab5087a2461cd785e49c35d35f25303",
"content_id": "2915bd1b1ec121cc35cd5a6e1a25eda56a6f5306",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1787,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 83,
"path": "/athena/core/arm/Planning/include/common/path.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief 提供关于路径的一些信息和计算方法。\n */\n\n#pragma once\n#include <vector>\n#include <string>\n#include \"navi_point.h\"\n\nusing namespace std;\n\n/**\n * @class path\n * @brief 路径信息。\n */\nclass path\n{\npublic:\n vector<navi_point> ref_points; ///<参考路径\n\n int insert_pos; ///<current insert pos;\n int current_pos; ///<current pos of vehicle;\n\n int st_pos; ///< start of motion plan pos\n int en_pos; ///< end of motion plan pos\n\npublic:\n /**\n * @brief 构造函数\n */\n path()\n {\n insert_pos = 0;\n current_pos = 0;\n st_pos = 0;\n en_pos = 0;\n }\n\n /**\n * @brief 析构函数\n */\n ~path()\n {\n\n }\n\n /**\n * @brief 从文件中读取一条轨迹,赋值到参考路径ref_points\n * @param path_gps_log_file 输入量:文件名。\n */\n void read_navi_file(string path_gps_log_file);\n\n /**\n * @brief 将路径ref_赋值到参考路径ref_points\n * @param ref_ 输入量:输入路径。\n */\n void reset_path(const vector<navi_point>& ref_);\n\n /**\n * @brief 将参考路径ref_points输出到文件中\n * @param filename 输入量:文件名。\n */\n void output_navi_point_all(char *filename);\n\n\n\n};\n\n/**\n* @brief 计算路径其中一段内各点的里程\n* @param points 输入量:路径点列。\n* @param start_pos 输入量:需要计算里程的起始位置。\n* @param end_pos 输入量:需要计算里程的终点位置。\n*/\nvoid cau_all_mileage_of_points(vector<navi_point>& points, int start_pos, int end_pos);\n\n/**\n* @brief 计算整条路径中各点的里程\n* @param virtual_lane 输入量:需要计算里程的路径。\n*/\nvoid cau_all_mileage_of_lane(path& virtual_lane);\n"
},
{
"alpha_fraction": 0.5872921347618103,
"alphanum_fraction": 0.5872921347618103,
"avg_line_length": 19.790122985839844,
"blob_id": "544b9d5c22c0db647ecb0a245d7a29711c9622b5",
"content_id": "f4eb9f83bcc22e246ec7182ec766ee26647a0c04",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1822,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 81,
"path": "/athena/core/arm/Common/include/base/config/route_config.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef _NAD_ROUTE_CONFIG_H\n#define _NAD_ROUTE_CONFIG_H\n\n#include <string>\n#include <vector>\n\nusing std::vector;\nusing std::string;\n//引用base头文件\n#include \"../nad_base.h\"\n\nclass nad_route_config\n{\npublic:\n double lat;\n double lon;\n int type;\npublic:\n void load_from_xml( pugi::xml_node &node);\n\n //重载赋值\n nad_route_config & operator = (const nad_route_config &route)\n {\n lat = route.lat;\n lon = route.lon;\n type = route.type;\n return *this;\n }\n\n //重载打印输出\n friend inline ostream & operator << (ostream & os, nad_route_config &route)\n {\n cout << \"lat=\" << route.lat << \", lon=\" << route.lon << \", type=\" << route.type << endl;\n return os;\n }\n};\n\nclass nad_route_config_list\n{\npublic:\n //网元配置数组\n map<string, vector<nad_route_config> > route_map;\n\npublic:\n //查找网元\n nad_route_config *find(string name);\n\n //从xml文件中加载\n int load_from_file(string filename);\n\n //重载打印输出\n friend inline ostream & operator << (ostream & os, nad_route_config_list &ne)\n {\n map<string, vector<nad_route_config> >::iterator it;\n for(it = ne.route_map.begin(); it != ne.route_map.end(); it++)\n {\n vector<nad_route_config>::iterator it_inner;\n for (it_inner = it->second.begin(); it_inner != it->second.end(); ++it_inner)\n std::cout << *it_inner << std::endl;\n }\n return os;\n }\n};\n\n//路线规划\nclass route_config\n{\npublic:\n nad_route_config_list route_list;\npublic:\n //文件中加载地图相关信息,包括坐标原点信息\n int load_from_file(string filename);\n\n void show();\n};\n\nint nad_route_config_init();\n//全局配置数据结构\nextern route_config g_route_config;\n\n#endif\n"
},
{
"alpha_fraction": 0.625798225402832,
"alphanum_fraction": 0.6372924447059631,
"avg_line_length": 22.727272033691406,
"blob_id": "09a9a4b46ed206767229b058561a97177fc923ac",
"content_id": "57400f4344591d900c0abb67f5d48c386ea4b674",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1800,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 66,
"path": "/athena/core/x86/Camera/vision_ssd_detect/include/ssd_detection/vision_detector.hpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n#include <caffe/caffe.hpp>\n#include <opencv2/core/core.hpp>\n#include <opencv2/highgui/highgui.hpp>\n#include <opencv2/imgproc/imgproc.hpp>\n#include <algorithm>\n#include <iomanip>\n#include <iosfwd>\n#include <memory>\n#include <string>\n#include <utility>\n#include <vector>\nusing namespace std;\nusing namespace caffe;\n\n///卷积特征目标检测\nclass Detector\n {\n public:\n ////////////////////\n /// @param\tp1 网络模型文件\n /// @param\tp2 权重文件.\n /// @param p3 均值文件说明\n /// @param p4 图片均值(104,117,123)\n /// @param p5 是否用GPU\n /// @param p4 GPU 序列号\n ///////////////////////////////\n Detector(const string& model_file,\n const string& weights_file,\n const string& mean_file, const string& mean_value,bool in_use_gpu, unsigned int in_gpu_id);\n ///输出目标结果\n std::vector<vector<float> > Detect(const cv::Mat& img);\n\nprivate:\n ///设置图片像素均值\n void SetMean(const string& mean_file, const string& mean_value);\n /// 将cv Mat对象包装转换为网络卷积层输入的格式\n void WrapInputLayer(std::vector<cv::Mat>* input_channels);\n ///将输入图像转换为caffe网络的输入图像格式\n void Preprocess(const cv::Mat& img,\n std::vector<cv::Mat>* input_channels);\n\n private:\n /// 网络结构指针\n shared_ptr<Net<float> > net_;\n /// 输入图片大小\n cv::Size input_geometry_;\n /// 图片通道数\n int num_channels_;\n /// 设置均值图片\n cv::Mat mean_;\n};\n/// pascal voc 数据caffe-ssd检测\nnamespace Ssd\n{\n /// 分类类别枚举值\n\tenum SsdDetectorClasses\n\t{\n\t\tBACKGROUND,\n\t\tPLANE, BICYCLE, BIRD, BOAT,\n\t\tBOTTLE, BUS, CAR, CAT, CHAIR,\n\t\tCOW, TABLE, DOG, HORSE,\n\t\tMOTORBIKE, PERSON, PLANT,\n\t\tSHEEP, SOFA, TRAIN, TV, NUM_CLASSES\n\t};\n}\n"
},
{
"alpha_fraction": 0.551111102104187,
"alphanum_fraction": 0.5659258961677551,
"avg_line_length": 20.0625,
"blob_id": "933d2883ff098fe94067ee3e1328b2d32af000e2",
"content_id": "b65f38627f3e98f3433ad534aa41aa90e9304330",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 697,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 32,
"path": "/athena/core/x86/Planning/include/vehicle_dynamic/circle.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <math.h>\n#include <vector>\n\n#include \"common/navi_point.h\"\n#include \"spline/spline.h\"\n\nusing namespace std;\n\n// 点的位置\nclass circle\n{\npublic:\n // 传感器相对坐标\n double R;\n double ks;\n\n};\n\nint get_circle(double x1, double y1,\n double x2, double y2,\n double x3, double y3,\n double& x, double& y,\n double& r, double& ks);\n\nint sign_circle_e(double x, double y,\n double x1, double y1,\n double x2, double y2);\n\ndouble compute_ks_from_spline( double first_deriv_x, double second_deriv_x,\n double first_deriv_y, double second_deriv_y );\n\n"
},
{
"alpha_fraction": 0.7636363506317139,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 51,
"blob_id": "932a6ee475e3f60d3ae66a3d9ec59f122e44849a",
"content_id": "1b83851e30c192ea3e28e8b2cdac09466b426410",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 55,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 1,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/acc/cs55/cs55_torque_speed_throttle_map.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n\ndouble CS55GetAccValue(double speed,double torque);\n\n"
},
{
"alpha_fraction": 0.6815286874771118,
"alphanum_fraction": 0.6942675113677979,
"avg_line_length": 14.699999809265137,
"blob_id": "5f8562e9ab592a7d51ce69856552d64f25b2d220",
"content_id": "789cdac07fa2b33ecf3951a9e10c8595e1a97622",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 157,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 10,
"path": "/athena/examples/LCM/Singlecar/control/common/timer_app.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#include \"timer_app.h\"\n\nnamespace athena{\nnamespace control{\n void TimerApp::add_timer(func OnTimer)\n {\n timer.StartTimer(10, std::bind(OnTimer));\n }\n}\n}\n"
},
{
"alpha_fraction": 0.5798583626747131,
"alphanum_fraction": 0.6089693307876587,
"avg_line_length": 19.786884307861328,
"blob_id": "ca7f93dabc58ac90bae7c169c8d42dd1b9022f80",
"content_id": "7b91c4b62aabd0cdfd03728a7763ad371c5cafff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1313,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 61,
"path": "/athena/examples/LCM/Singlecar/planning/main.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/**\n * @file\n *\n * @brief a planning demo of lcm.\n */\n\n#include <iostream>\n\n#include <lcm/lcm.h>\n#include <lcm/lcm-cpp.hpp>\n\n\n#include \"planning_lcm_msg.h\"\n#include \"planning_node.h\"\n\n\n\n#include \"planning/planning.h\"\n#include \"common/car_state.h\"\n\nusing namespace std;\n\n\n///主程序\nint main(int argc, char *argv[])\n{\n cout << \" Motion planning start ! 2018.11.19: 1:25. @ by liming in Guanggu\" << endl;\n\n PlanningNode pn;\n pn.run();\n\n return 0;\n}\n\n///park test 主程序\n//int main(int argc, char *argv[])\n//{\n// cout << \" Motion planning start ! 2018.07.28: 15:25. @ by liming in Guanggu\" << endl;\n//\n// IntelligentParking ip;\n// path park_trajectory_d, park_trajectory_r;\n//\n// ///车停止位置\n// CarState car_state;\n// car_state.car_pose_.CurrentX_ = -4.0;\n// car_state.car_pose_.CurrentY_ = 10.0;\n// car_state.car_pose_.Current_heading_ = 90.0;\n//\n// ///生成轨迹\n// ip.compute_parking_trajectory( car_state );\n//\n// ///D档部分\n// ip.get_trajectory_d( park_trajectory_d );\n// ip.extend_trajectory( park_trajectory_d, NUM_EXTEND_TRAJECTORY, 0.1, AT_STATUS_D );\n//\n// ///R档部分\n// ip.get_trajectory_r( park_trajectory_r );\n// ip.extend_trajectory( park_trajectory_r, NUM_EXTEND_TRAJECTORY, 0.1, AT_STATUS_R );\n//\n// return 0;\n//}\n\n\n\n"
},
{
"alpha_fraction": 0.6404374241828918,
"alphanum_fraction": 0.6831154823303223,
"avg_line_length": 29.47967529296875,
"blob_id": "56cd2138a675485d0318168814b907372fc7d36e",
"content_id": "7a1b1a1084331b25a7e300549e3ca4883f810221",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3749,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 123,
"path": "/athena/core/x86/Camera/lane_detect/include/bean/LaneParameterEstimator.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#ifndef\t_LANEPARAMETERESTIMATOR_H_\n#define\t_LANEPARAMETERESTIMATOR_H_\n#include <stdio.h>\n\n#include \"../utils/config.h\"\n#include \"../utils/config2.h\"\n#include \"../utils/matutil-d.h\"\n\n#include \"../utils/type.h\"\n\nclass PARAM_CAM;\nclass LaneMarkerPoints;\nclass LaneMarkerPoint;\n\n#define\tCS4_STATUS_NUMBER\t8\n//#define\tCS4_OBS_NUMBER\t10\n#define\tCS4_OBS_NUMBER\t1\n#define\tCS4_CONTROL_INPUT_NUMBER\t2\n#define\tCS4_STOCHASTIC_VARIAVLE_NUMBER\t8//3\n#define\tCDB_PSIDASH_SCALE\t\t1\n#define\tCDB_PSI_SCALE\t\t\t1\n#define\tCDB_EDASH_SCALE\t1\n#define\tCDB_E_SCALE\t\t1\n#define\tCDB_RHODASH_SCALE\t\t1\n#define\tCDB_RHO_SCALE\t\t\t1\n#define\tCDB_DELTAPHI_SCALE\t\t1\n#define\tCDB_WIDTH_SCALE\t\t\t1\n\nconst int LPID_YAWDASH\t\t\t=\t0;\nconst int LPID_YAW\t\t\t\t=\t1;\nconst int LPID_OFFSETDASH\t\t=\t2;\nconst int LPID_OFFSET\t\t\t=\t3;\nconst int LPID_CURVATUREDASH\t=\t4;\nconst int LPID_CURVATURE\t\t=\t5;\nconst int LPID_PITCH\t\t\t=\t6;\nconst int LPID_LANEWIDTH\t\t=\t7;\n\n#define\tCX0\t\t (pCamParam->ParamCam()->i_x0)\n#define\tCY0\t\t (pCamParam->ParamCam()->i_y0)\n#define\tWIDTH\t (pCamParam->ParamCam()->width)\n#define HEIGHT\t (pCamParam->ParamCam()->height)\n#define\tFOCUS\t (pCamParam->ParamCam()->pix_f_y)\n#define\tFOCUSX\t (pCamParam->ParamCam()->pix_f_x)\n#define\tFOCUSY\t (pCamParam->ParamCam()->pix_f_y)\n#define\tPOSX\t (pCamParam->ParamCam()->cam_pos_x)\n#define\tPOSY\t (pCamParam->ParamCam()->cam_pos_y)\n#define\tPOSZ\t (pCamParam->ParamCam()->cam_pos_z)\n#define\tPITCH\t (pCamParam->ParamCam()->pitch / 180. * M_PI)\n#define\tYAW\t\t (pCamParam->ParamCam()->yaw / 180. 
* M_PI)\n#define\tCV\t\t (pCamParam->getCurvatureVertical())\n//#define\tMINIMUM_CV\t(0.0001)\n\n#define\tTCX0\t\t(lCamParamTmp.ParamCam()->i_x0)\n#define\tTCY0\t\t(lCamParamTmp.ParamCam()->i_y0)\n#define\tTWIDTH\t(lCamParamTmp.ParamCam()->width)\n#define\tTHEIGHT\t(lCamParamTmp.ParamCam()->height)\n#define\tTFOCUS\t(lCamParamTmp.ParamCam()->pix_f_y)\n#define\tTFOCUSX\t(lCamParamTmp.ParamCam()->pix_f_x)\n#define\tTFOCUSY\t(lCamParamTmp.ParamCam()->pix_f_y)\n#define\tTPOSX\t(lCamParamTmp.ParamCam()->cam_pos_x)\n#define\tTPOSY\t(lCamParamTmp.ParamCam()->cam_pos_y)\n#define\tTPOSZ\t(lCamParamTmp.ParamCam()->cam_pos_z)\n#define\tTPITCH\t(lCamParamTmp.ParamCam()->pitch / 180. * M_PI)\n#define\tTYAW\t\t(lCamParamTmp.ParamCam()->yaw / 180. * M_PI)\n#define\tTCV\t\t(lCamParamTmp.getCurvatureVertical())\n\n//yu_2014.9.20\nextern void transformInputToRoad(PARAM_CAM *pCamParam, int iIsrc, int iJsrc, double *pdZg, double *pdXg);\nextern double calcVimageFromXvehicleAndZvehicleOnRoad(PARAM_CAM *pCamParam, double a_dXVehicle, double a_dZVehicle);\nextern double calcVimageFromXvehicleAndZvehicleOnRoadByNearAreaParameter(PARAM_CAM *pCamParam, double a_dXVehicle, double a_dZVehicle);\nextern double calcUimageFromIsrc(PARAM_CAM *pCamParam, int iIsrc);\n\n#define\tBUFSIZE\t1024\n// psidash, psi, edash, e rhodash, rho, phi, w\n// psidash, psi, edash, e rhodash, rho, phi, w\nstatic double g_adb_X_0[CS4_STATUS_NUMBER * 1] = {\n\t0.1 / 180 * M_PI,\t// psidash\n\t1.0 / 180 * M_PI,\t// psi\n\t100.0,\t// edash,\n\t1000.0,\t// e\n\t1.0e-12,\t// rhodash\n\t1.0e-9,\t// rho,\n\t5.0 / 180 * M_PI,\t// phi\n\tDB_DP_AVE * 1.1//4000\t// w\n};\nstatic double g_adb_X_m[CS4_STATUS_NUMBER * 1] = {\n\t(0.0) / CDB_PSIDASH_SCALE,\t// psidash\n\t(0.0) / CDB_PSI_SCALE,\t// psi\n\t(0.0) / CDB_EDASH_SCALE,\t// edash,\n\t(0.0) / CDB_E_SCALE,\t// e\n\t(0.0) / CDB_RHODASH_SCALE,\t// rhodash\n\t(0.0) / CDB_RHO_SCALE,\t// rho,\n\t(0.0) / CDB_DELTAPHI_SCALE,\t// phi\n\t(DB_DP_AVE) / CDB_WIDTH_SCALE\t// 
w[mm]\n};\n\nstatic double g_adb_W_m[CS4_STOCHASTIC_VARIAVLE_NUMBER * 1] = {\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0\n};\nstatic double g_adb_W_m0[CS4_STOCHASTIC_VARIAVLE_NUMBER * 1] = {\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0,\n\t1.0\n};\nstatic double g_adb_V_m[CS4_OBS_NUMBER * 1] = {\n\tV_V_M * OBSERVER_ERROR_FACTOR\n};\n\n\n\n#endif\t_LANEPARAMETERESTIMATOR_H_\n"
},
{
"alpha_fraction": 0.5880952477455139,
"alphanum_fraction": 0.6380952596664429,
"avg_line_length": 17.2608699798584,
"blob_id": "022d5c339f69a9c3e0b331c4edb9e7d998f92900",
"content_id": "711d5eaf927e7bd3ef64aaa9cd29bcfbfa82aed2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1272,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 69,
"path": "/athena/core/x86/Planning/include/common/cs.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "#pragma once\n\n#include <boost/function.hpp>\n#include <boost/bind.hpp>\n#include <vector>\n\n#include \"common/LocalGeographicCS.hpp\"\n//#include \"planning/planning_param.h\"\n#include \"config.h\"\n\n\ntypedef std::pair<double,double> ll_point;\ntypedef std::pair<double,double> xy_point;\ntypedef std::tuple<double,double,double> xys_point;\ntypedef std::tuple<double,double,int32_t,double,double,int32_t,double> xyh_point;\ntypedef std::tuple<double,double,double> blh_point;\ntypedef std::vector<xyh_point> xyh_vec;\ntypedef std::vector<xyh_point> blh_vec;\n\nusing namespace std;\n\nenum LATLON_COORDS_CS\n{\n LAT_CS = 0,\n LON_CS = 1,\n ID_CS = 2,\n};\n\nenum XYH_POINT_CENTROL\n{\n X_CTR = 0,\n Y_CTR = 1,\n ID_CTR = 2,\n HEAD_CTR = 3,\n WIDTH_CTR = 4,\n DIV_CTR = 5,\n S_CTR = 6,\n};\n\nenum XY_COORDS_CS\n{\n X_CS = 0,\n Y_CS = 1,\n S_CS = 2\n};\n\nclass coord_transfer\n{\npublic:\n LocalGeographicCS cs;\n\n coord_transfer()\n {\n cs.set_origin( origin_lat, origin_lon );\n }\n ~coord_transfer()\n {\n\n }\n\n //设定原点坐标\n void set_origin();\n\nprivate:\n\n const double origin_lat = ORIGIN_LAT;//30.45814058804; // (rongke) //39.7409469; (tongzhou)\n const double origin_lon = ORIGIN_LON;//114.31801222674; // (rongke) // 116.6545923;\n\n};\n"
},
{
"alpha_fraction": 0.47787609696388245,
"alphanum_fraction": 0.5,
"avg_line_length": 15.142857551574707,
"blob_id": "2baa4736e77df9135c786f4cd41b85f7c8c5feda",
"content_id": "420e5aea230f0ed8f4f939bd7a63cb84a2098894",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 934,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 56,
"path": "/athena/core/arm/Control/include/common/map_matching/point.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "GB18030",
"text": "#pragma once\n\n#include <math.h>\n#include <vector>\n\n// 点的位置\nclass point\n{\npublic:\n // 传感器相对坐标\n float x;\n float y;\n float z;\n\n // 绝对坐标\n float xx;\n float yy;\n float zz;\n\npublic:\n bool is_in_rect(point& p1, point& p2,\n point& p3, point& p4);\n\n bool is_in_rect_xx(point& p1, point& p2,\n point& p3, point& p4);\n\n point()\n {\n x=y=z=0;\n xx=yy=zz=0;\n }\n\n ~point()\n {\n\n }\n\n point& operator= (const point& src)\n {\n this->x = src.x;\n this->y = src.y;\n this->z = src.z;\n\n this->xx = src.xx;\n this->yy = src.yy;\n this->zz = src.zz;\n\n return *this;\n }\n};\n\nbool samep(point p1, point p2);\ndouble dist(point p1,point p2);\npoint midpoint(point p1, point p2);\nfloat get_cross(point& p1, point& p2, point& p);\nfloat get_cross_xx(point& p1, point& p2, point& p);\n"
},
{
"alpha_fraction": 0.5639449954032898,
"alphanum_fraction": 0.5706159472465515,
"avg_line_length": 34.384803771972656,
"blob_id": "cb3621ff8b6de2277ea19811446807b4f87264e8",
"content_id": "eb80441a5d489c3293eb3a4a3dad33e9763eebac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 46672,
"license_type": "no_license",
"max_line_length": 159,
"num_lines": 1237,
"path": "/athena/examples/LCM/Singlecar/obu/src/obu/obu_planning/obu_session_obu.cpp",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "/*-------------------------------------------------------\n * 文件名:obu_session_obu.cpp\n * 创建者:李思政\n * 时 间:2016-04-03\n * 描 述:obu管理自身用到的数据结构的定义\n-------------------------------------------------------*/\n#include \"obu_planning.h\"\n\n\nobu_session_obu::obu_session_obu() : nad_session(\"\")\n{\n session_status = E_OBU_SESSION_OBU_CONNECT_HAND;\n start_auto_status = E_OBU_SESSION_OBU_START_AUTO_WAIT;\n flashing_status = FLASHING_STATUS_NONE;\n wait_count = 0;\n last_htbt = get_current_time();\n cur_lon = 0.0;\n cur_lat = 0.0;\n platoon_sn = 0;\n sug_speed = 20;\n cur_speed = 0.0;\n call_park_state = 0; //可召车状态\n is_auto_drive = false;\n route_plan_status = false;\n}\n\n//析构函数\nobu_session_obu::~obu_session_obu()\n{\n}\n\n//处理定时器\nvoid obu_session_obu::handle_timer(int64_t counter)\n{\n handle_speed();\n\n if(session_status == E_OBU_SESSION_OBU_CONNECT_HAND || session_status == E_OBU_SESSION_OBU_CONNECT_AUTO)\n {\n\n send_ou_vui_report();\n if(route->route_behavior_.center_line_.size() > 0)\n {\n //给motion的相关信息\n nad_lcm::om_info_report om_info_report_msg;\n om_info_report_msg.is_auto_drive = is_auto_drive;\n om_info_report_msg.drive_behavior = DRIVE_BEHAVIOR_OVERTAKE;\n om_info_report_msg.car_action = CAR_ACTION_SINGLE;\n\n //车速\n om_info_report_msg.num_of_command = 1;\n nad_lcm::obu_command cmd;\n cmd.sug_speed = sug_speed;\n\n om_info_report_msg.command.push_back(cmd);\n\n om_info_report_msg.route_time_stamp = route->time_stamp_;\n\n route->locate_on_lane(cur_lat, cur_lon);\n route::center_point &cp = route->route_behavior_.cur_point_;\n /*\n route::center_point *kp = route->get_next_stop_kp(right_of_way);\n if(kp == NULL)\n {\n om_info_report_msg.stop_mileage = 10000.0;\n }\n else\n {\n om_info_report_msg.stop_mileage = kp->dist(cp);\n }\n */\n om_info_report_msg.ending_lane = cp.left_lane_num + 1;\n\n int lc_lane_num = route->get_change_lane(cur_lat,cur_lon,cur_yaw,CL_DIRECTION_LEFT);\n int rc_lane_num = 
route->get_change_lane(cur_lat,cur_lon,cur_yaw,CL_DIRECTION_RIGHT);
            if( lc_lane_num + rc_lane_num == 0)
            {
                om_info_report_msg.ending_mileage = -1; // < 0 means lane change is not allowed
            }
            else
            {
                om_info_report_msg.ending_mileage = 1;// > 0 means lane change is allowed
            }

            // send the request to motion
            g_lcm->publish("om_info_report", &om_info_report_msg);

        }
    }
}

// Serialize the current route-planning result (lanes + key points) into an
// LCM route_planning message for downstream consumers.
void obu_session_obu::save_to_lcm(nad_lcm::route_planning &other)
{
    int32_t kpret = 0;
    // basic info
    other.time_stamp = route->time_stamp_;
    other.destination = route->destination_;
    other.route_reason = route->route_reason_;

    // lane list
    for(size_t i = 0; i < route->map_->lane_list_.size(); i++)
    {
        nad_lcm::lane_of_route tmp;
        tmp.lane_id = route->map_->lane_list_[i].lane_id;
        tmp.num_of_lane = route->map_->lane_list_[i].cur_brother_lane.size();
        tmp.lane.assign(route->map_->lane_list_[i].cur_brother_lane.begin(), route->map_->lane_list_[i].cur_brother_lane.end());
        tmp.length = route->map_->lane_list_[i].length;
        tmp.max_speed = route->map_->lane_list_[i].max_speed;
        other.lane_list.push_back(tmp);
    }
    other.num_of_lane = other.lane_list.size();

    // key point list
    for(size_t i = 0; i < route->route_behavior_.key_points_.size(); i++)
    {
        // skip points that carry no type flag in [KP_NORMAL, KP_ROUTE_STOP]
        kpret = 0;
        for (size_t j = KP_NORMAL; j < (KP_ROUTE_STOP + 1); j++)
        {
            if (1 == route->route_behavior_.key_points_[i].type[j])
            {
                kpret = 1;
                break;
            }
        }
        if (0 == kpret)
        {
            continue;
        }
        if (route->route_behavior_.key_points_[i].type[KP_ROUTE_START] == 1 || route->route_behavior_.key_points_[i].type[KP_ROUTE_STOP] == 1)
        {
            nad_lcm::center_point tmp;
            tmp.x = route->route_behavior_.key_points_[i].x;
            tmp.y = route->route_behavior_.key_points_[i].y;
            tmp.index = route->route_behavior_.key_points_[i].index;
            tmp.yaw = route->route_behavior_.key_points_[i].yaw;
            tmp.width = route->route_behavior_.key_points_[i].width;
            tmp.mileage = route->route_behavior_.key_points_[i].mileage;
            tmp.lane_id = route->route_behavior_.key_points_[i].lane_id;
            //tmp.type = route->route_behavior_.key_points_[i].type;
            // for now only the first matching type value is reported
            if (route->route_behavior_.key_points_[i].type[KP_ROUTE_START] == 1)
            {
                tmp.type = KP_ROUTE_START;
            }
            if (route->route_behavior_.key_points_[i].type[KP_ROUTE_STOP] == 1)
            {
                tmp.type = KP_ROUTE_STOP;
            }
            //std::cout << "just set type to normal ,TOBEDONE" << std::endl;
            tmp.left_lane_num = route->route_behavior_.key_points_[i].left_lane_num;
            tmp.right_lane_num = route->route_behavior_.key_points_[i].right_lane_num;
            other.key_point_list.push_back(tmp);
        }
    }
    other.num_of_kp = other.key_point_list.size();
}

// Serialize the current vehicle state into an LCM obu_info message.
void obu_session_obu::save_to_lcm(nad_lcm::obu_info &obu)
{
    obu.obu_name = name;
    obu.obu_type = obu_type;
    obu.gps_time = gps_time;
    obu.cur_lon = cur_lon;
    obu.cur_lat = cur_lat;
    obu.cur_yaw = cur_yaw;
    obu.cur_brake = cur_brake;
    save_to_lcm(obu.cur_point);
    obu.cur_speed = cur_speed;
    obu.cur_acceleration = cur_acceleration;
    obu.steering_angle = steering_angle;
    obu.cur_gears = cur_gears;
    obu.is_auto_drive = (session_status == E_OBU_SESSION_OBU_CONNECT_AUTO ||
                         session_status == E_OBU_SESSION_OBU_DISCONN_AUTO ? 1 : 0);
    obu.is_online = (session_status == E_OBU_SESSION_OBU_CONNECT_AUTO ||
                     session_status == E_OBU_SESSION_OBU_CONNECT_HAND ? 1 : 0);
    obu.platoon_sn = platoon_sn;
    obu.destination = route->destination_;
    obu.sensor_stat[0] = brake_stat; // report brake status // temporary workaround
    // sensor status reporting is not supported yet; fill in defaults
    for(int i = 1; i < MAX_SENSOR; i++)
    {
        obu.sensor_stat[i] = sensor_stat[i]; // only GPS is real data
    }
    obu.sensor_stat[IDX_INS_CENTER] = SENSOR_OK; // the rest are replaced with fake data
    obu.sensor_stat[IDX_RADAR_ESR] = SENSOR_OK;
    obu.sensor_stat[IDX_RADAR_SRR_FL] = SENSOR_OK;
    obu.sensor_stat[IDX_RADAR_SRR_FR] = SENSOR_OK;
    obu.sensor_stat[IDX_RADAR_SRR_BL] = SENSOR_OK;
    obu.sensor_stat[IDX_RADAR_SRR_BR] = SENSOR_OK;
}

// Serialize the vehicle's current route point into an LCM center_point message.
void obu_session_obu::save_to_lcm(nad_lcm::center_point &cp)
{
    cp.x = route->route_behavior_.cur_point_.x;
    cp.y = route->route_behavior_.cur_point_.y;
    cp.index = route->route_behavior_.cur_point_.index;
    cp.yaw = route->route_behavior_.cur_point_.yaw;
    cp.width = route->route_behavior_.cur_point_.width;
    cp.mileage = route->route_behavior_.cur_point_.mileage;
    cp.lane_id = route->route_behavior_.cur_point_.lane_id;
    cp.type = KP_NORMAL;
    //std::cout << "just set type to normal ,TOBEDONE" << std::endl;
    cp.left_lane_num = route->route_behavior_.cur_point_.left_lane_num;
    cp.right_lane_num = route->route_behavior_.cur_point_.right_lane_num;
}


// Return true if an identical lane-change request (same direction + reason)
// was seen within MAX_CL_TIMEOUT; otherwise record this request and return false.
bool obu_session_obu::cl_too_busy(int32_t direction, int32_t cl_reason)
{
    // build the filter key string
    char buf[100];
    sprintf(buf, "%s|%s", cl_direction_str(direction), cl_reason_str(cl_reason));
    string key = buf;

    // look for a recent duplicate lane-change request
    int64_t now = get_current_time();
    map<string, int64_t>::iterator it;
    it = cl_list.find(key);
    if (it != cl_list.end() && now < (it->second + MAX_CL_TIMEOUT))
    {
        return true;
    }

    // record the current lane-change request
    cl_list[key] = now;
    return false;
}

// Suppress duplicate lane-change requests within MAX_CL_TIMEOUT (~5 seconds):
// a manual (HAND) request refreshes the timestamp; an avoiding request within
// the window of a recent HAND request is reported as busy.
bool obu_session_obu::cl_too_busy_avoiding(int32_t cl_reason)
{
    // build the filter key
    int32_t key = -1;
    int64_t now = get_current_time(); // current time
    if(cl_reason == CL_REASON_HAND)
    {
        key = CL_REASON_HAND;
        cl_avoid_list[key] = now; // refresh the timestamp for this key
    }

    if(cl_reason ==
CL_REASON_OBU_AVOIDING)
    {
        // look for a recent duplicate (manual) lane-change request
        map<int32_t, int64_t>::iterator it;
        it = cl_avoid_list.find(CL_REASON_HAND);
        if (it != cl_avoid_list.end() && now < (it->second + MAX_CL_TIMEOUT))
        {
            return true;
        }

    }
    return false;
}


// Internal lane-change execution. Most guard checks below are intentionally
// commented out — the function currently always answers RET_OK (or RET_BUSY
// for duplicate non-manual requests); lane-existence decisions are delegated
// to the planner. NOTE(review): several check_* parameters are presently unused.
int obu_session_obu::change_lane_ex(int32_t direction, int32_t cl_reason, int starting_lane, int ending_lane,
                                    bool need_ack, bool check_busy, bool check_target, bool check_follow,
                                    bool check_forbid, bool check_platoon, bool check_cooperate)
{
    route::center_point &cp = route->route_behavior_.cur_point_;

    // filter out frequent lane-change requests (a manual lever change is a
    // deliberate human action, so it is exempt from the frequency check)
    if (cl_reason != CL_REASON_HAND && check_busy && cl_too_busy(direction, cl_reason))
    {
        return RET_BUSY;
    }

    // initialize the respond message
    nad_lcm::om_change_lane_respond om_change_lane_respond_msg;
    om_change_lane_respond_msg.direction = direction;
    om_change_lane_respond_msg.reason = cl_reason;

    if (starting_lane == -1 && ending_lane == -1)
    {
        if (direction == CL_DIRECTION_LEFT)
        {
            // om_change_lane_respond_msg.starting_lane = 2;
            om_change_lane_respond_msg.ending_lane = 1;
        }
        else
        {
            //om_change_lane_respond_msg.starting_lane = 1;
            om_change_lane_respond_msg.ending_lane = 2;
        }
    }
    else
    {
        //om_change_lane_respond_msg.starting_lane = starting_lane;
        om_change_lane_respond_msg.ending_lane = ending_lane;
    }

    // backdoor: with no route planning, lane changes are always allowed and
    // never trigger platoon or cooperative lane changes
    if (cp.index < 0)
    {
        //return send_change_lane_respond(om_change_lane_respond_msg,true, RET_ROUTE_NO_RESULT, "无路径规划禁止换道!");
    }

    // check whether the current road segment forbids lane changes (disabled)
    /*
    if (check_forbid && route->get_current_forbid_change())
    {
        return send_change_lane_respond(om_change_lane_respond_msg,need_ack, RET_FORBID, "当前路段禁止换道!");
    }
    */

    // when route planning exists, also check whether a target lane exists
    if (check_target)
    {
        // manual lever changes do not check for a lane on the left, to work
        // around the single-lane case where a "right" lane exists and the
        // vehicle would otherwise be steered back to the left;
        // lane-existence decisions are left to the planner — here we only
        // look up whether the current segment forbids lane changes
        int lc_lane_num = route->get_change_lane(cur_lat,cur_lon,cur_yaw,CL_DIRECTION_LEFT);
        int rc_lane_num = route->get_change_lane(cur_lat,cur_lon,cur_yaw,CL_DIRECTION_RIGHT);


        // lever suppression: record manual requests toward a missing lane
        if(cl_reason == CL_REASON_HAND && direction == CL_DIRECTION_LEFT && lc_lane_num < 1)
        {
            cl_too_busy_avoiding(cl_reason);
        }
        if(cl_reason == CL_REASON_HAND && direction == CL_DIRECTION_RIGHT && rc_lane_num < 1)
        {
            cl_too_busy_avoiding(cl_reason);
        }
        //

        if (direction == CL_DIRECTION_LEFT && lc_lane_num < 1 )// no lane on the left: forbid a leftward lever change
        {
            // return send_change_lane_respond(om_change_lane_respond_msg,need_ack, RET_NOT_EXIST,"");
        }
        // changing from the fast lane to the slow lane
        if (direction == CL_DIRECTION_RIGHT && lc_lane_num < 1 && cl_too_busy_avoiding(cl_reason))// lever state
        {
            //return send_change_lane_respond(om_change_lane_respond_msg,need_ack, RET_NOT_EXIST,"");
        }

        if (direction == CL_DIRECTION_RIGHT && rc_lane_num < 1 )// no lane on the right: forbid a rightward lever change
        {
            //return send_change_lane_respond(om_change_lane_respond_msg,need_ack, RET_NOT_EXIST,"");
        }

        // changing from the slow lane to the fast lane
        if (direction == CL_DIRECTION_LEFT && rc_lane_num < 1 && cl_too_busy_avoiding(cl_reason))// lever state
        {
            //return send_change_lane_respond(om_change_lane_respond_msg,need_ack, RET_NOT_EXIST,"");
        }
    }

    //send_change_lane_respond(om_change_lane_respond_msg,true, RET_OK, ""); // allow the lane change

    return RET_OK;
}

// Publish the lane-change respond message; on success bind a lane-change key
// point at the current route index, otherwise raise an alarm toward the VUI.
int obu_session_obu::send_change_lane_respond(nad_lcm::om_change_lane_respond &om_change_lane_respond_msg,bool need_ack, int ret, string alarm)
{
    if (need_ack || ret == RET_OK)
    {
        om_change_lane_respond_msg.retcode = ret;
        om_change_lane_respond_msg.ending_mileage = 50;
        g_lcm->publish("om_change_lane_respond", &om_change_lane_respond_msg);
        LOG_SEND(log_om_change_lane_respond(&om_change_lane_respond_msg));

        if (ret == RET_OK)
        {

            route->bind_key_point(route->route_behavior_.cur_point_.index,
                                  (om_change_lane_respond_msg.direction == CL_DIRECTION_LEFT ? KP_CHANGE_LANE_LEFT : KP_CHANGE_LANE_RIGHT), "");
        }
        else
        {
            alarm_report_to_vui(ALARM_WARNING, alarm);
        }
    }
    return ret;
}

// Assemble and publish the periodic OBU->VUI status report (vehicle state,
// suggested speed, key points, trajectory and route).
void obu_session_obu::send_ou_vui_report()
{
    nad_lcm::ou_vui_report ou_vui_report_msg;
    save_to_lcm(ou_vui_report_msg.obu);

    ou_vui_report_msg.sug_speed = sug_speed;

    save_route_kp_to_lcm(
        ou_vui_report_msg.num_of_key, ou_vui_report_msg.key_point,
        ou_vui_report_msg.cur_point_index,
        ou_vui_report_msg.num_of_point, ou_vui_report_msg.line);

    save_to_lcm(ou_vui_report_msg.route);

    g_lcm->publish("ou_vui_report", &ou_vui_report_msg);
}

// Export the key points and the trajectory line between the route's start and
// stop key points into the given LCM output arrays; outputs are cleared first
// and left empty when no usable route exists.
void obu_session_obu::save_route_kp_to_lcm(int16_t &num_of_key, std::vector<nad_lcm::key_point_info> &key_point,
                                           int16_t &cur_point_index, int16_t &num_of_point, std::vector<nad_lcm::route_line_point> &line)
{
    key_point.clear();
    line.clear();
    num_of_key = num_of_point = 0;
    cur_point_index = -1;
    if (route->route_behavior_.center_line_.size() < 2 || route->route_behavior_.key_points_.size() < 2)
    {
        return;
    }

    // locate the start/stop key points
    route::center_point *kp1 = route->route_behavior_.start_kp();
    route::center_point *kp2 = route->route_behavior_.stop_kp();
    if (kp1 == NULL || kp2 == NULL)
    {
        LOG(ERROR) << "start_kp or stop_kp == NULL " << endl;;
        return;
    }
    int i = kp1->index;
    int e = kp2->index;
    while (i <= e)
    {
        route::center_point &cp = route->route_behavior_.center_line_[i];

        // add a trajectory point
        nad_lcm::route_line_point point;
        double lat, lon;
        route->transfer_.cs.xy2ll(cp.x, cp.y, lat, lon);
        point.lat = (float)lat;
        point.lon = (float)lon;
        line.push_back(point);

        // add a key point
        if (cp.type[KP_ROUTE_START] == 1 || cp.type[KP_ROUTE_STOP] == 1 || cp.type[KP_CURRENT] == 1 || cp.type[KP_CO_CHANGE_LANE] == 1)
        {
            nad_lcm::key_point_info info;
            if (cp.type[KP_CURRENT] == 1)
            {
                info.type = KP_CURRENT;
            }
            if (cp.type[KP_ROUTE_START] == 1)
            {
                info.type = KP_ROUTE_START;
            }
            if (cp.type[KP_ROUTE_STOP] == 1)
            {
                info.type =
KP_ROUTE_STOP;\n }\n if (cp.type[KP_CO_CHANGE_LANE] == 1)\n {\n info.type = KP_CO_CHANGE_LANE;\n }\n //LOG(ERROR) << \"need to be judged,TOBEDONE \" << endl;;\n info.lat = lat;\n info.lon = lon;\n info.mileage = cp.dist(*kp1);\n info.id = cp.id;\n key_point.push_back(info);\n\n //刷新位置\n if (cp.type[KP_CURRENT] == 1)\n {\n cur_point_index = (int16_t)line.size() - 1;\n }\n }\n\n //获得下一个轨迹点\n get_next_point(i, e, route->route_behavior_.center_line_);\n }\n\n //如果cur点在stop之后,则补充一个current点\n if (cur_point_index < 0 && key_point.size() > 0)\n {\n key_point.push_back(key_point[key_point.size() - 1]);\n key_point.back().type = KP_CURRENT;\n cur_point_index = (int16_t)line.size() - 1;\n }\n\n //保存数组大小\n num_of_key = (int16_t)key_point.size();\n num_of_point = (int16_t)line.size();\n\n //调试打印\n /*route::RouteBase tmp(g_rsu_planning->osm_map);\n route::center_point pp;\n pp.type = KP_NONE;\n pp.index = pp.left_lane_num = pp.right_lane_num = 0;\n pp.yaw = pp.width = pp.mileage = 0.0;\n pp.object = NULL;\n for (int16_t i = 0; i < num_of_point; i++)\n {\n nad_lcm::route_line_point &point = line[i];\n tmp.transfer.cs.ll2xy(point.lat, point.lon, pp.x, pp.y);\n tmp.center_line.push_back(pp);\n }\n tmp.save_to_osm(\"../log/save_route_kp_to_lcm.osm\",\n line[cur_point_index].lat, line[cur_point_index].lon);*/\n}\n\n//在完整切片列表中选取当前obu前1000,后100米的所有切片,route_section包含一次规划完整的切片信息;other 是周期下发给motion的分段信息\nvoid obu_session_obu::send_segment_section_line(nad_lcm::route_planning_m route_section,nad_lcm::route_planning_m &other)\n{\n //此函数会定时调用更新,每次下发新的路径时首先更新完整切片信息route_section的cur_section,cur_section,mileage_stop的值\n route_section.cur_section = route->cur_sec_index_;\n if(route_section.cur_section < 0) return; ///小于0时,没有在section中进行定位\n //==求得mileage_start\n route_section.mileage_start = route_section.line[route_section.cur_section].lane[0].mileage; //通过路径规划的终点经纬度然后调用自己的定位函数得到终点所在的切片点\n\n //终点的匹配\n double end_dis_min;//匹配终点切片的最短距离\n int end_sec_index = 0;\n int end_lane_index = 0;\n 
route::center_point end_pt = route->route_behavior_.key_points_[route->route_behavior_.key_points_.size()-1];//最后一个keypoint不一定是终点、、?????\n route->match_section_line_xy(end_pt.x,end_pt.y,end_sec_index,end_lane_index,3,30,end_dis_min);\n if(end_dis_min > 3)\n {\n LOG(ERROR) << \"stop_point out of section line!\" << endl;\n cout << \"stop_point out of section line!\" << endl;\n }\n\n //route_section.line[end_sec_pos].lane[0];最左边边线的里程起点\n //==求得mileage_stop\n route_section.mileage_stop = route_section.line[end_sec_index].lane[0].mileage-route_section.mileage_start;//终点所在的切片点的里程与当前切片点的里程之差就是\n //至此route_planning_m消息填写完毕\n\n route_plan_section_clear(other); //清理发送端\n other.time_stamp = route_section.time_stamp; //时间戳\n other.destination = route_section.destination; //目的地\n other.route_reason = route_section.route_reason; //规划原因\n\n other.mileage_pass = route_section.mileage_pass; //前500m\n other.mileage_next = route_section.mileage_next; //后1000m\n other.mileage_start = route_section.mileage_start;\n other.mileage_stop = route_section.mileage_stop;\n\n //得到当前所在道路信息\n int64_t section_cnt = 0;\n roadmap::lane* route_lane = new roadmap::lane();\n int64_t left_lane_id = route->map_->locate_point_on_lanelet(cur_lat,cur_lon);//left 边界,当前道路的左边界\n route->map_->get_lane_from_map_by_id(left_lane_id,route_lane);\n int lane_size = route_lane->cur_brother_lane.size();\n\n if(lane_size >= 1)\n {\n int64_t right_lane_id = route_lane->cur_brother_lane[lane_size-1];\n vector<point_with_id_t> nodes_left(route->map_->osm_map_->lanelet_by_id(left_lane_id)->nodes(LEFT)); //当前车辆所在的道路lane_id的左边界线\n vector<point_with_id_t> nodes_right(route->map_->osm_map_->lanelet_by_id(right_lane_id)->nodes(RIGHT)); //当前最右边的车道的右边界限\n\n bool is_chosen = false;\n for(int i = 0; i < route_section.num_of_section; i ++)\n {\n nad_lcm::section_m sections;\n memset(§ions,0,sizeof(nad_lcm::section_m));\n for(int j = 0; j < route_section.line[i].num_of_lane; j ++) //num_of_section字段已经填写满\n {\n //向前1000米向后500米\n 
if((route_section.line[route_section.cur_section].lane[0].mileage - route_section.line[i].lane[j].mileage <= route_section.mileage_pass\n && route_section.line[route_section.cur_section].lane[0].mileage - route_section.line[i].lane[j].mileage >=0)\n || (route_section.line[i].lane[j].mileage - route_section.line[route_section.cur_section].lane[0].mileage <= route_section.mileage_next\n && route_section.line[i].lane[j].mileage - route_section.line[route_section.cur_section].lane[0].mileage >= 0))\n {\n is_chosen = true;//为了不在同一个切片上的多个切片点循环重复添加多次\n {\n sections.lane.push_back(route_section.line[i].lane[j]);\n sections.num_of_lane ++;\n if(route_section.line[route_section.cur_section].lane[j].mileage == route_section.line[i].lane[j].mileage)\n other.cur_section = section_cnt; //修正因提取部分切片后cur_section的值\n }\n }\n }\n\n if(sections.num_of_lane > 0) //切片有点值才push\n {\n other.line.push_back(sections);\n other.num_of_section++;\n section_cnt++; //用于计算截取后cur_section变化后的值\n }\n\n if(is_chosen) //为了不在同一个切片上的多个切片点循环重复添加多次\n {\n if(route->blane_lborder_map_.find(i) != route->blane_lborder_map_.end())\n {\n route::Point_m pt = route->blane_lborder_map_[i];\n nad_lcm::point_xys point_l;\n point_l.x = pt.x;\n point_l.y = pt.y;\n point_l.type = KP_NORMAL;\n other.left_line.line.push_back(point_l);\n other.left_line.num_of_point ++;\n }\n else\n {\n cout << \"sec_num \"<<i<<\" has no left_line point!\"<<endl;\n }\n\n if(route->blane_rborder_map_.find(i) != route->blane_rborder_map_.end())\n {\n route::Point_m pt = route->blane_rborder_map_[i];\n nad_lcm::point_xys point_r;\n point_r.x = pt.x;\n point_r.y = pt.y;\n point_r.type = KP_NORMAL;\n other.right_line.line.push_back(point_r);\n other.right_edge.line.push_back(point_r);\n other.right_line.num_of_point ++;\n other.right_edge.num_of_point ++;\n }\n else\n {\n cout << \"sec_num \"<<i<<\" has no right_line and right_edge point!\"<<endl;\n }\n\n if(route->changelane_lborder_map_.find(i) != route->changelane_lborder_map_.end())\n {\n 
route::Point_m pt = route->changelane_lborder_map_[i];\n nad_lcm::point_xys point_rever_r;\n point_rever_r.x = pt.x;\n point_rever_r.y = pt.y;\n point_rever_r.type = KP_NORMAL;\n other.left_edge.line.push_back(point_rever_r); //此处增加了left_edge\n other.left_edge.num_of_point ++;\n }\n else\n {\n cout << \"sec_num \"<<i<<\" has no left_edge point!\"<<endl;\n }\n\n is_chosen = false;\n }\n }\n }\n else\n {\n LOG(ERROR) << \"No Route is found!\" << endl;\n cout << \"No Route is found!\" << endl;\n }\n delete route_lane;\n}\n\n//给vui下发路径规划结果\nvoid obu_session_obu::send_ou_route_respond(int retcode, nad_lcm::route_planning route)\n{\n nad_lcm::ou_route_respond ou_route_respond_msg;\n ou_route_respond_msg.obu_name = name;\n ou_route_respond_msg.retcode = retcode;\n ou_route_respond_msg.route = route;//revise\n g_lcm->publish(\"ou_route_respond\", &ou_route_respond_msg);\n LOG_SEND(log_ou_route_respond(&ou_route_respond_msg));\n cout << \"send_ou_route_respond: 已给vui下发路径规划结果\" << endl;\n}\n\n//给vui下发启动自动驾驶结果,也可用于主动下发启动自动驾驶\nvoid obu_session_obu::send_ou_start_auto_respond(int32_t retcode, int32_t start_reason)\n{\n //切换状态\n if (retcode == RET_OK)\n {\n session_status = E_OBU_SESSION_OBU_CONNECT_AUTO;\n start_auto_status = E_OBU_SESSION_OBU_START_AUTO_OK;\n }\n\n //下发切换消息\n nad_lcm::ou_start_auto_respond ou_start_auto_respond_msg;\n ou_start_auto_respond_msg.obu_name = name;\n ou_start_auto_respond_msg.retcode = retcode;\n ou_start_auto_respond_msg.start_reason = start_reason;\n g_lcm->publish(\"ou_start_auto_respond\", &ou_start_auto_respond_msg);\n LOG_SEND(log_ou_start_auto_respond(&ou_start_auto_respond_msg));\n cout << \"send_ou_start_auto_respond: 已给vui下发启动自动驾驶应答\" << endl;\n}\n\n//给vui下发退出自动驾驶结果,也可用于主动下发退出自动驾驶\nvoid obu_session_obu::send_ou_stop_auto_respond(int32_t retcode, int32_t stop_reason)\n{\n //切换状态\n if (retcode == RET_OK)\n {\n session_status = E_OBU_SESSION_OBU_CONNECT_HAND;\n }\n\n //下发切换消息\n nad_lcm::ou_stop_auto_respond ou_stop_auto_respond_msg;\n 
ou_stop_auto_respond_msg.obu_name = name;
    ou_stop_auto_respond_msg.retcode = retcode;
    ou_stop_auto_respond_msg.stop_reason = stop_reason;
    g_lcm->publish("ou_stop_auto_respond", &ou_stop_auto_respond_msg);
    LOG_SEND(log_ou_stop_auto_respond(&ou_stop_auto_respond_msg));
    cout << "send_ou_stop_auto_respond: 已给vui下发退出自动驾驶应答" << endl;
}

// Handle uo_route_request uploaded from the VUI: run route planning toward the
// requested destination and reply with the result.
void obu_session_obu::handle_uo_route_request(const lcm::ReceiveBuffer* rbuf,
                                              const std::string& chan, const nad_lcm::uo_route_request *msg)
{
    LOG_RECV(log_uo_route_request(msg));

    // the request is valid and the OBU is online
    if (msg->destination != "" && (session_status == E_OBU_SESSION_OBU_CONNECT_HAND || session_status == E_OBU_SESSION_OBU_CONNECT_AUTO))
    {
        route->destination_ = msg->destination;

        // fill in the key points
        vector<route::key_point> key_point_list;
        for( int i = 0; i < msg->key_point_list.size(); i++)
        {
            route::key_point key_point_tmp;
            key_point_tmp.lat= msg->key_point_list[i].lat;
            key_point_tmp.lon= msg->key_point_list[i].lon;
            key_point_tmp.type = msg->key_point_list[i].type;
            key_point_tmp.yaw = msg->key_point_list[i].yaw;
            key_point_list.push_back(key_point_tmp);
        }

        // once replanning has started, stop sending sections to motion
        route_plan_status = false;

        size_t begin = 0;

        int ret = route->routing(key_point_list, ROUTE_REASON_VUI, msg->destination, cur_lat, cur_lon, cur_yaw, begin);

        nad_lcm::route_planning tmp_route;
        save_to_lcm(tmp_route);
        // report logs
        if(ret != RET_OK)
        {
            log_report_to_vui(LOG_INFO, "路径规划失败");

            if(ret == RET_NOT_EXIST)
                alarm_report_to_vui(ALARM_WARNING, "路径规划失败,车辆偏离地图!");
            if(ret == RET_ROUTE_NOT_MATCH)
                alarm_report_to_vui(ALARM_WARNING, "路径规划失败,无直达路径!");
            if(ret == RET_ERROR)
                alarm_report_to_vui(ALARM_WARNING, "路径规划失败!");

            //send_ou_route_respond(ret,tmp_route);
        }
        else
        {
            // after planning succeeded, resume sending sections to motion
            route_plan_status = true;
            log_report_to_vui(LOG_INFO, "路径规划成功");

            //route->lane_to_section_line(cur_lon,cur_lat);
            //send_om_section_line_report(IS_REPLAN);

            // drawing: alternate between the two draw slots
            if(route_draw_flag == true)
            {
                route_draw[0] = route;
                //route_draw[0]->save_section_to_draw(route);
                route_draw_flag = false;
            }
            else
            {
                route_draw[1] = route;
                //route_draw[1]->save_section_to_draw(route);
                route_draw_flag = true;
            }

            obu_lcm::back_coordinate_XYH back_coor_msg;

            back_coor_msg.length = 6;

            // find the destination key point
            for(int i = 0; i < route->route_behavior_.key_points_.size(); i++)
            {
                if(route->route_behavior_.key_points_[i].type[KP_ROUTE_STOP] == 1)
                {
                    cout << "KP_ROUTE_STOP x:" << route->route_behavior_.key_points_[i].x << " y:" << route->route_behavior_.key_points_[i].y << endl;
                    back_coor_msg.x = route->route_behavior_.key_points_[i].x;
                    back_coor_msg.y = route->route_behavior_.key_points_[i].y;// destination coordinates
                    back_coor_msg.heading = route->route_behavior_.key_points_[i].yaw;
                    back_coor_msg.width = route->route_behavior_.key_points_[i].width;
                }
            }

            // destination containing "泊车" (parking) selects the parking type
            if(route->destination_.find("泊车") != string::npos)
            {
                back_coor_msg.type = 11;
            }
            else
            {
                back_coor_msg.type = 10;
            }


            g_lcm->publish("back_coordinate_XYH", &back_coor_msg);

            // record the back_coordinate_XYH publication
            LOG(ERROR) << "back_coordinate_XYH start "<< route->destination_ ;

        }
        log_report_to_vui(LOG_INFO, "请求路径规划回复");

        send_ou_route_respond(ret,tmp_route);
    }
}

// Handle uo_start_auto_request uploaded from the VUI.
void obu_session_obu::handle_uo_start_auto_request(const lcm::ReceiveBuffer* rbuf,
                                                   const std::string& chan, const nad_lcm::uo_start_auto_request *msg)
{
    LOG_RECV(log_uo_start_auto_request(msg));
    if(name == msg->obu_name)
    {
        // timestamp mismatch: reply failure to the OBU directly, no need to report to the CSU
        if(msg->time_stamp != route->time_stamp_)
        {
            send_ou_start_auto_respond(RET_ERROR,START_REASON_VUI);
            is_auto_drive = false;
        }
        else
        {
            send_ou_start_auto_respond(RET_OK,START_REASON_VUI);
            is_auto_drive = true;
        }
    }
    log_report_to_vui(LOG_INFO, "请求启动自动驾驶");
}


// Handle uo_stop_auto_request reported from the VUI.
void
obu_session_obu::handle_uo_stop_auto_request(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, const nad_lcm::uo_stop_auto_request *msg)\n{\n LOG_RECV(log_uo_stop_auto_request(msg));\n int retcode = RET_OK;\n\n if(name != msg->obu_name)\n {\n retcode = RET_ERROR;\n log_report_to_vui(LOG_INFO, \"退出自动驾驶失败\");\n }\n\n log_report_to_vui(LOG_INFO, \"退出自动驾驶成功\");\n //启动自动驾驶的应答\n send_ou_stop_auto_respond(retcode, STOP_REASON_VUI);\n is_auto_drive = false;\n}\n\n//读取control_info_report\nvoid obu_session_obu::handle_control_info_report(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, const obu_lcm::control_info_report *msg)\n{\n #if 0\n //保存controller上报的信息\n cur_lon = msg->cur_lon;\n cur_lat = msg->cur_lat;\n cur_yaw = msg->cur_yaw;\n cur_brake = msg->cur_brake;\n cur_speed = msg->cur_speed * 3.6;\n cur_acceleration = msg->cur_acceleration;\n steering_angle = msg->steering_angle;\n flashing_status = msg->flashing_status;\n cur_gears = msg->cur_gears;\n gps_time = msg->gps_time;\n\n eps_stat = msg->eps_stat;\n epb_stat = msg->epb_stat;\n brake_stat = msg->brake_stat;\n //只有gps是真数据\n if(msg->gps_stat.size() > 0)\n {\n sensor_stat[IDX_GPS_CENTER] = msg->gps_stat[0];\n\n }\n#endif // 0\n cur_lon = msg->lon;\n cur_lat = msg->lat;\n cur_yaw = msg->yaw;\n cur_brake = msg->brake_value;\n cur_speed = msg->vehicle_speed * 3.6;\n cur_acceleration = msg->accel_value;\n steering_angle = msg->steer_angle;\n flashing_status = msg->flashing_status;\n cur_gears = msg->at_status;\n gps_time = msg->gps_time;\n // eps_stat = msg->eps_stat;\n eps_stat = msg->steer_status;\n epb_stat = msg->epb_status;\n brake_stat = msg->brake_value; //只有gps是真数据\n // if(msg->gps_stat.size() > 0)\n // {\n // sensor_stat[IDX_GPS_CENTER] = msg->gps_stat[0];\n //\n // }\n sensor_stat[0] = 0;\n\n //刷新车辆位置\n route->locate_on_lane(cur_lat, cur_lon);\n}\n\n//motionplanner请求换道\nvoid obu_session_obu::handle_mo_change_lane_request(const lcm::ReceiveBuffer* rbuf,\n const std::string& chan, const 
nad_lcm::mo_change_lane_request *msg)\n{\n LOG_RECV(log_mo_change_lane_request(msg));\n change_lane_ex(msg->direction, msg->reason, msg->starting_lane,\n msg->ending_lane, true, false, true, false, false, true, true);\n\n}\n\n//注册消息到lcm的函数\nvoid obu_session_obu::reg_msg_to_lcm(NE_LCM* lcm)\n{\n //name = g_config.local.name;\n nad_record_obu *rec = db_query_obu(name);\n if (rec == NULL)\n {\n LOG(ERROR) << \"obu_session_obu: 在数据库中查找不到OBU: \" << name.c_str();\n }\n else\n {\n obu_type = rec->obu_type;\n }\n lcm->subscribe(\"uo_route_request\", &obu_session_obu::handle_uo_route_request, this);\n lcm->subscribe(\"uo_start_auto_request\", &obu_session_obu::handle_uo_start_auto_request, this);\n lcm->subscribe(\"uo_stop_auto_request\", &obu_session_obu::handle_uo_stop_auto_request, this);\n lcm->subscribe(\"control_info_report\", &obu_session_obu::handle_control_info_report, this);\n lcm->subscribe(\"mo_change_lane_request\", &obu_session_obu::handle_mo_change_lane_request, this);\n}\n\n//构造析构函数\nobu_session_obu_timer::obu_session_obu_timer() : nad_timer(200)\n{\n}\n\nobu_session_obu_timer::obu_session_obu_timer(int64 interval_ms):nad_timer(interval_ms)\n{\n}\n\nobu_session_obu_timer::~obu_session_obu_timer()\n{\n}\n\n//执行定时器\nvoid obu_session_obu_timer::handle()\n{\n g_obu_planning->obu.handle_timer(counter);\n g_obu_planning->obu.calc_mileage_section_acc(); //路径中切片信息的相关字段更新\n}\n\nvoid obu_session_obu::send_om_section_line_report(int replay_flag)\n{\n if(route->route_motion_info_.num_of_section > 0 && route->cur_sec_index_ > -1)//具有定位信息后,再下发定时的规划信息\n {\n nad_lcm::om_route_respond om_route_respond_msg;\n om_route_respond_msg.replan_flag = replay_flag;\n //发送重规划消息\n nad_lcm::route_planning_m route_m;\n nad_lcm::route_planning_m route_m_draw;\n planning_m_2_lcm(route->route_motion_info_ , route_m);//data->lcm消息\n send_segment_section_line(route_m,om_route_respond_msg.route);\n send_segment_section_line(route_m,route_m_draw); //obu绘制切片用到\n 
lcm_2_planning_m(route_m_draw,route->route_motion_info_draw_);\n g_lcm->publish(\"om_route_respond\", &om_route_respond_msg);\n }\n}\n\n//分段下发路径\nvoid obu_session_obu::calc_mileage_section_acc() //每隔450m刷新\n{\n double mileage_tmp = 0.0;\n\n if ((route->route_motion_info_.line.size() == 0) || route_plan_status == false)\n {\n return;\n }\n route->locate_position_on_section_ll(cur_lat,cur_lon); //当前所在切片位置更新\n if(route->cur_sec_index_ >= 0)\n {\n mileage_tmp = route->route_motion_info_.line[route->cur_sec_index_].lane[0].mileage;\n }\n ///mileage_section_acc表示上次规划时的里程\n ///mileage_section_send 初始化\n /*std::cout << \"zws: cur_section=\" << route->cur_sec_index_ <<\" mileage_tmp=\"<< mileage_tmp <<\" -----------\"\n << \" route->mileage_section_send_=\"<<route->mileage_section_send_<<endl;*/\n if(route->cur_sec_index_ >= 0 && (mileage_tmp - route->mileage_section_send_ >= 500 || route->mileage_section_send_ < 0))\n {\n if( route->mileage_section_send_ < 0) //第一次下发,每次route,重新下发会清零\n {\n send_om_section_line_report(IS_REPLAN);\n }\n else\n {\n send_om_section_line_report(IS_NOT_REPLAN);\n }\n\n route->mileage_section_send_= mileage_tmp;\n }\n #if 0\n if(route->cur_sec_index_ >= 0)\n {\n cout << \"route->cur_sec_index_:\" << route->cur_sec_index_ << endl;\n mileage_tmp = route->route_motion_info_.line[route->cur_sec_index_].lane[0].mileage;\n /*std::cout << \"cur_section=\"<<cur_sec_pos<<\" mileage_tmp=\"<<mileage_tmp<<\" -----------\"\n << \" route->mileage_section_send=\"<<route->mileage_section_send<<endl;*/\n }\n std::cout << \" mileage_tmp=\"<<mileage_tmp<<\" -----------\"<< \" route->mileage_section_send_=\"<<route->mileage_section_send_\n << endl << \"(mileage_tmp - route->mileage_section_send_)\" << mileage_tmp - route->mileage_section_send_ << endl;\n if(mileage_tmp - route->mileage_section_send_ >= 500) //mileage_section_acc表示上次规划时的里程\n {\n route->mileage_section_send_ = mileage_tmp;\n send_om_section_line_report(IS_NOT_REPLAN);\n }\n #endif // 0\n\n 
if(route_draw_flag == true) //更新绘图\n {\n route_draw[0] = route;\n //route_draw[0]->save_section_to_draw(route);\n route_draw_flag = false;\n }\n else\n {\n route_draw[1] = route;\n //route_draw[1]->save_section_to_draw(route);\n route_draw_flag = true;\n }\n}\n\nvoid obu_session_obu::route_plan_section_clear(nad_lcm::route_planning_m &route_sec)\n{\n route_sec.left_line.line.clear();\n route_sec.left_line.num_of_point = 0;\n route_sec.left_edge.line.clear();\n route_sec.left_edge.num_of_point = 0;\n route_sec.right_line.line.clear();\n route_sec.right_line.num_of_point = 0;\n route_sec.right_edge.line.clear();\n route_sec.right_edge.num_of_point = 0;\n route_sec.line.clear();\n route_sec.num_of_section = 0;\n}\n\nvoid obu_session_obu::planning_m_2_lcm(route::route_planning_m & planning_m, nad_lcm::route_planning_m &planning_m_lcm)\n{\n planning_m_lcm.time_stamp = planning_m.time_stamp;\n planning_m_lcm.destination = planning_m.destination;\n planning_m_lcm.route_reason = planning_m.route_reason;\n planning_m_lcm.mileage_pass = planning_m.mileage_pass;\n planning_m_lcm.mileage_next = planning_m.mileage_next;\n planning_m_lcm.mileage_start = planning_m.mileage_start;\n planning_m_lcm.mileage_stop = planning_m.mileage_stop;\n planning_m_lcm.num_of_section = planning_m.num_of_section;\n planning_m_lcm.cur_section = planning_m.cur_section;\n planning_m_lcm.left_edge.num_of_point = planning_m.left_edge.num_of_point;\n planning_m_lcm.left_line.num_of_point = planning_m.left_line.num_of_point;\n planning_m_lcm.right_edge.num_of_point = planning_m.right_edge.num_of_point;\n planning_m_lcm.right_line.num_of_point = planning_m.right_line.num_of_point;\n\n for(int i = 0; i < planning_m.num_of_section; i++)\n {\n nad_lcm::section_m sec_m;\n sec_m.num_of_lane = planning_m.line[i].num_of_lane;\n\n for(int j = 0; j < planning_m.line[i].num_of_lane; j++)\n {\n nad_lcm::point_m pt_m;\n pt_m.x = planning_m.line[i].lane[j].x;\n pt_m.y = planning_m.line[i].lane[j].y;\n pt_m.type = 
planning_m.line[i].lane[j].type;\n pt_m.yaw = planning_m.line[i].lane[j].yaw;\n pt_m.k = planning_m.line[i].lane[j].k;\n pt_m.mileage = planning_m.line[i].lane[j].mileage;\n pt_m.width = planning_m.line[i].lane[j].width;\n pt_m.sug_speed = planning_m.line[i].lane[j].sug_speed;\n sec_m.lane.push_back(pt_m);\n }\n planning_m_lcm.line.push_back(sec_m);\n }\n\n for(int i = 0; i < planning_m.left_edge.line.size(); i++)\n {\n nad_lcm::point_xys pt;\n pt.x = planning_m.left_edge.line[i].x;\n pt.y = planning_m.left_edge.line[i].y;\n pt.type = planning_m.left_edge.line[i].type;\n planning_m_lcm.left_edge.line.push_back(pt);\n }\n\n for(int i = 0; i < planning_m.left_line.line.size(); i++)\n {\n nad_lcm::point_xys pt;\n pt.x = planning_m.left_line.line[i].x;\n pt.y = planning_m.left_line.line[i].y;\n pt.type = planning_m.left_line.line[i].type;\n planning_m_lcm.left_line.line.push_back(pt);\n }\n\n for(int i = 0; i < planning_m.right_edge.line.size(); i++)\n {\n nad_lcm::point_xys pt;\n pt.x = planning_m.right_edge.line[i].x;\n pt.y = planning_m.right_edge.line[i].y;\n pt.type = planning_m.right_edge.line[i].type;\n planning_m_lcm.right_edge.line.push_back(pt);\n }\n\n for(int i = 0; i < planning_m.right_line.line.size(); i++)\n {\n nad_lcm::point_xys pt;\n pt.x = planning_m.right_line.line[i].x;\n pt.y = planning_m.right_line.line[i].y;\n pt.type = planning_m.right_line.line[i].type;\n planning_m_lcm.right_line.line.push_back(pt);\n }\n\n}\n\nvoid obu_session_obu::lcm_2_planning_m(nad_lcm::route_planning_m & planning_m_lcm, route::route_planning_m &planning_m)\n{\n planning_m.time_stamp = planning_m_lcm.time_stamp;\n planning_m.destination = planning_m_lcm.destination;\n planning_m.route_reason = planning_m_lcm.route_reason;\n planning_m.mileage_pass = planning_m_lcm.mileage_pass;\n planning_m.mileage_next = planning_m_lcm.mileage_next;\n planning_m.mileage_start = planning_m_lcm.mileage_start;\n planning_m.mileage_stop = planning_m_lcm.mileage_stop;\n 
planning_m.num_of_section = planning_m_lcm.num_of_section;\n planning_m.cur_section = planning_m_lcm.cur_section;\n planning_m.left_edge.num_of_point = planning_m_lcm.left_edge.num_of_point;\n planning_m.left_line.num_of_point = planning_m_lcm.left_line.num_of_point;\n planning_m.right_edge.num_of_point = planning_m_lcm.right_edge.num_of_point;\n planning_m.right_line.num_of_point = planning_m_lcm.right_line.num_of_point;\n\n for(int i = 0; i < planning_m_lcm.num_of_section; i++)\n {\n route::section_m sec_m;\n sec_m.num_of_lane = planning_m_lcm.line[i].num_of_lane;\n\n for(int j = 0; j < planning_m_lcm.line[i].num_of_lane; j++)\n {\n route::point_m pt_m;\n pt_m.x = planning_m_lcm.line[i].lane[j].x;\n pt_m.y = planning_m_lcm.line[i].lane[j].y;\n pt_m.type = planning_m_lcm.line[i].lane[j].type;\n pt_m.yaw = planning_m_lcm.line[i].lane[j].yaw;\n pt_m.k = planning_m_lcm.line[i].lane[j].k;\n pt_m.mileage = planning_m_lcm.line[i].lane[j].mileage;\n pt_m.width = planning_m_lcm.line[i].lane[j].width;\n pt_m.sug_speed = planning_m_lcm.line[i].lane[j].sug_speed;\n sec_m.lane.push_back(pt_m);\n }\n planning_m.line.push_back(sec_m);\n }\n\n for(int i = 0; i < planning_m_lcm.left_edge.line.size(); i++)\n {\n route::point_xys pt;\n pt.x = planning_m_lcm.left_edge.line[i].x;\n pt.y = planning_m_lcm.left_edge.line[i].y;\n pt.type = planning_m_lcm.left_edge.line[i].type;\n planning_m.left_edge.line.push_back(pt);\n }\n\n for(int i = 0; i < planning_m_lcm.left_line.line.size(); i++)\n {\n route::point_xys pt;\n pt.x = planning_m_lcm.left_line.line[i].x;\n pt.y = planning_m_lcm.left_line.line[i].y;\n pt.type = planning_m_lcm.left_line.line[i].type;\n planning_m.left_line.line.push_back(pt);\n }\n\n for(int i = 0; i < planning_m_lcm.right_edge.line.size(); i++)\n {\n route::point_xys pt;\n pt.x = planning_m_lcm.right_edge.line[i].x;\n pt.y = planning_m_lcm.right_edge.line[i].y;\n pt.type = planning_m_lcm.right_edge.line[i].type;\n planning_m.right_edge.line.push_back(pt);\n }\n\n 
for(int i = 0; i < planning_m_lcm.right_line.line.size(); i++)\n {\n route::point_xys pt;\n pt.x = planning_m_lcm.right_line.line[i].x;\n pt.y = planning_m_lcm.right_line.line[i].y;\n pt.type = planning_m_lcm.right_line.line[i].type;\n planning_m.right_line.line.push_back(pt);\n }\n}\n\n//设置默认速度\nvoid obu_session_obu::set_default_speed()\n{\n if (is_auto_drive)\n {\n speed.add(\"no_route\", SV_HIGH, SV_ABSOLUTE, 0.0);\n }\n else\n {\n speed.add(\"hand_drive\", SV_LOW, SV_ABSOLUTE, 20.0);\n }\n}\n\n//速度处理\nvoid obu_session_obu::handle_speed()\n{\n //清空速度规划\n speed.clear();\n\n //不处理没有路径规划的情况\n if (route->route_behavior_.center_line_.size() == 0)\n {\n set_default_speed();\n sug_speed = speed.speed;\n return;\n }\n\n route->locate_on_lane(cur_lat, cur_lon);\n route::center_point &cp = route->route_behavior_.cur_point_;\n //绑定结束,恢复keypoint列表\n route->route_behavior_.get_keypoint_on_center_line();\n route::center_point *ep = route->route_behavior_.get_kp(KP_ROUTE_STOP);\n if (ep == NULL || ep->index <= cp.index)\n {\n\n set_default_speed();\n sug_speed = speed.speed;\n\n return;\n }\n //初始化速度值\n string id = \"\";\n double lim = (double)route->route_behavior_.get_current_limspeed(id);\n ///////////////////////////////////////////////////////add for test/////////////////\n //lim = 30.0;\n if (sub_count(id, \"|\") == 1)\n {\n speed.add(\"limspeed\", SV_LOW, SV_ABSOLUTE, lim);\n }\n else\n {\n speed.add(\"lane_max_speed\", SV_LOW, SV_ABSOLUTE, lim);\n }\n sug_speed = speed.speed;\n}\n\n"
},
{
"alpha_fraction": 0.7931034564971924,
"alphanum_fraction": 0.8103448152542114,
"avg_line_length": 54,
"blob_id": "e7618ad8501c9e1aee561c4861594e7a7f3a8d2f",
"content_id": "1fd3c968b683b6d6680a4407c17cb6afb4792675",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 58,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 1,
"path": "/athena/examples/LCM/Singlecar/control/control_logic/acc/TRUCK_J6P/truck_j6p_torque_speed_throttle_map.h",
"repo_name": "Tubbxl/Athena_Src",
"src_encoding": "UTF-8",
"text": "\n\ndouble TruckJ6pGetAccValue(double speed,double accel);\n\n"
}
] | 244 |
NehaKala/Btech_project_work | https://github.com/NehaKala/Btech_project_work | 49b7dca91c81c2c61103fdfaf95371a476ec7d57 | 12a76fb32b2ee7dab05a0ab5ad3cc4526b1c513d | 107223c92448fcbe9975763c1acbe66817bba55a | refs/heads/master | 2021-01-04T08:17:39.441593 | 2020-02-14T08:43:54 | 2020-02-14T08:43:54 | 240,462,931 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.45371219515800476,
"alphanum_fraction": 0.499541699886322,
"avg_line_length": 17.65517234802246,
"blob_id": "8989bde47da36b733252e33126b5a8af310dc305",
"content_id": "14ab3def5e0cfe78c57fa91ecd937e54d7ee5250",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1091,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 58,
"path": "/inverse_matrix.py",
"repo_name": "NehaKala/Btech_project_work",
"src_encoding": "UTF-8",
"text": "\nfrom copy import deepcopy\n\ndef minor(array,i,j):\n c = array\n c = c[:i] + c[i+1:]\n for k in range(0,len(c)):\n c[k] = c[k][:j]+c[k][j+1:]\n return c\n\ndef det(array,n):\n if n == 1 :return array[0][0]\n if n == 2 :return array[0][0]*array[1][1] - array[0][1]*array[1][0]\n sum = 0\n for i in range(0,n):\n m = minor(array,0,i)\n sum =sum + ((-1)**i)*array[0][i] * det(m,n-1)\n return sum\n\n\n\ny = [[4,2,3,8],[1,4,5,6],[5,3,4,5],[4,9,1,4]]\nn=4\n\nr = deepcopy(y)\n\nfor i in range(4):\n for j in range(4):\n m=minor(y,i,j)\n d=det(m,3)\n r[i][j]=d\nprint('minor_matrix=',r)\n\n\n\ncof=deepcopy(r)\nfor i in range(4):\n for j in range(4):\n c=((-1)**(i+j))*r[i][j]\n cof[i][j]=c\nprint(\"cof=\",cof)\n\nadj=deepcopy(cof)\nfor i in range(4):\n for j in range(4):\n a=cof[j][i]\n adj[i][j]=a\nprint('adj_matrix=',adj) \n\n\ndeterminant=det(y,4)\nprint('Determinant=',determinant)\n\ninv=deepcopy(adj)\nfor i in range(4):\n for j in range(4):\n s=(1/float(determinant))*adj[i][j]\n inv[i][j]=s\nprint('inv=',inv) \n\n \n\n"
}
] | 1 |
kaanersoy12/BMICalculator | https://github.com/kaanersoy12/BMICalculator | b557dec3e37089fa759f3c0b2d64ed263b41ba73 | d7101a3045abfb3febbfcf25af59792328800101 | d1ade78d44da404883703bc223f963ba55e6cb10 | refs/heads/master | 2020-09-14T04:49:49.048549 | 2019-11-28T19:59:04 | 2019-11-28T19:59:04 | 223,022,169 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6549520492553711,
"alphanum_fraction": 0.6741213798522949,
"avg_line_length": 22.076923370361328,
"blob_id": "be6ace3839c51c13440170fc058be08a8dc910cc",
"content_id": "90393110a972015d2e969e355e28e45da6db6a20",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 313,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 13,
"path": "/README.txt",
"repo_name": "kaanersoy12/BMICalculator",
"src_encoding": "UTF-8",
"text": "This is a BMI Caltulator writed by python.\r\n\r\nweight(kg) and height(cm)\r\n\r\nYou can calculate your BMI with a basic way.\r\n\r\nBMI formula is (weight / (height / 100) * (height / 100)) \r\n\r\n**UPDATE**\r\nI am added a function file for python. It is a 'def' file and can usable for all projects. \r\n\r\n\r\nHave a nice dayy.\r\n"
},
{
"alpha_fraction": 0.6265822649002075,
"alphanum_fraction": 0.655063271522522,
"avg_line_length": 40.266666412353516,
"blob_id": "d50f76e4c93bea0abb8c5e2601dab7ca1e8c926b",
"content_id": "4a458e5a3453a34a511a7c6ac0c162376146e78c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 632,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 15,
"path": "/BMI_Calculator.py",
"repo_name": "kaanersoy12/BMICalculator",
"src_encoding": "UTF-8",
"text": "print(\"****Welcome to BMI Calculator****\")\r\nprint(\"Please enter your weight(kg).\")\r\nweight = float(input())\r\nprint(\"Please enter your height(cm).\")\r\nheight = float(input())\r\nBMI = (weight) / ((height/100) ** 2)\r\nprint(\"Your BMI is {}\".format(int(BMI)))\r\nif BMI >= 18.5 and BMI <= 25:\r\n print(\"You are in NORMAL BMI Class.\")\r\nelif BMI > 25 and BMI <= 30:\r\n print(\"You are a little overweight but don't worry it is resolvable.\")\r\nelif BMI > 30:\r\n print(\"You are little obese please consult any doctor in your city.\")\r\nif BMI < 18.5:\r\n print(\"You are underweight for normal people. Please consult any doctor in your city.\")"
},
{
"alpha_fraction": 0.570652186870575,
"alphanum_fraction": 0.591304361820221,
"avg_line_length": 44,
"blob_id": "5504d96633e2880df784df3ea39c22754bd88672",
"content_id": "4f5b36f1becbfa8c0ae892b4e0f7963a0ab07a5e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 920,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 20,
"path": "/bmiCalculatorFunction.py",
"repo_name": "kaanersoy12/BMICalculator",
"src_encoding": "UTF-8",
"text": "def bmi(height=0,weight=0):\r\n height = int(input(\"Please enter your height(cm)..:\"))\r\n weight = int(input(\"Please enter your weight(kg)..:\"))\r\n bmivalue = weight / ((height/100)**2)\r\n if (bmivalue <= 18):\r\n bmivalue = int(bmivalue)\r\n print(\"Your BMI value is..:\", bmivalue)\r\n print(\"You are a little bit weak. Please consult a doctor.\")\r\n elif(bmivalue >18 and bmivalue <=25):\r\n bmivalue = int(bmivalue)\r\n print(\"Your BMI value is..:\", bmivalue)\r\n print(\"It is a normal BMI value.\")\r\n elif(bmivalue > 25 and bmivalue <= 30):\r\n bmivalue = int(bmivalue)\r\n print(\"Your BMI value is..:\", bmivalue)\r\n print(\"You are a little bit overweight. Please consult a doctor.\")\r\n elif(bmivalue > 30):\r\n bmivalue = int(bmivalue)\r\n print(\"Your BMI value is..:\", bmivalue)\r\n print(\"You are obese class 1. Please consult a doctor.\")\r\n"
}
] | 3 |
eduardogpg/important_people | https://github.com/eduardogpg/important_people | f48fb48fb3150d53e30f270e9bde0abc7e0a0868 | cbf02ee9deeccd1f37cb2f30d1436694d376a7f2 | 5f3cfd876c77a7831c42f6b378bc8acb57c6efd9 | refs/heads/master | 2020-12-08T10:49:02.644401 | 2016-09-11T17:38:14 | 2016-09-11T17:38:14 | 67,936,613 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7595419883728027,
"alphanum_fraction": 0.7595419883728027,
"avg_line_length": 28,
"blob_id": "0263d88ecf318ac1167d3f939e7f415f792f940c",
"content_id": "08cc1b9112dd6fdb4e2f1fc6e8ee156ad74f01f0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 262,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 9,
"path": "/config.py",
"repo_name": "eduardogpg/important_people",
"src_encoding": "UTF-8",
"text": "import os\n\nclass Config(object):\n\tSECRET_KEY = os.environ.get('SECRET_KEY') or 'my_custome_secret_key'\n\nclass DevelopmentConfig(Config):\n\tDEBUG = True\n\tSQLALCHEMY_DATABASE_URI = 'mysql://root:@localhost/important_people'\n\tSQLALCHEMY_TRACK_MODIFICATIONS = False\n\n"
},
{
"alpha_fraction": 0.7340067625045776,
"alphanum_fraction": 0.739393949508667,
"avg_line_length": 38.105262756347656,
"blob_id": "35df9762ad82f91c64ff866bafa84bcb44880225",
"content_id": "62deb2d9b85fc04be696dcf48b44786cb1018b95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1485,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 38,
"path": "/forms.py",
"repo_name": "eduardogpg/important_people",
"src_encoding": "UTF-8",
"text": "from wtforms import Form\nfrom wtforms import TextField\nfrom wtforms import PasswordField\nfrom wtforms.fields.html5 import EmailField\nfrom wtforms import HiddenField\n\nfrom wtforms import validators\n\nfrom models import User\n\ndef length_honeypot(form, field):\n\tif len(field.data) > 0:\n\t\traise validators.ValidationError('Este campo debe de estar vacio!')\n\nclass LoginForm(Form):\n\tusername = TextField('Username',[validators.Required(message = 'El username es requerido')])\n\tpassword = PasswordField('Password', [validators.Required(message='El password es requerido')])\n\thoneypot = HiddenField(\"\",[ length_honeypot ])\n\nclass CreateForm(Form):\n\tusername = TextField('Username', [\n\t\t\t\t\t\t\tvalidators.Required(message = 'El username es requerido.'),\n\t\t\t\t\t\t\tvalidators.length(min=4, max=25, message='Ingrese un username valido.') ])\n\temail = EmailField('Correo electronico',[\n\t\t\t\t\t\t\tvalidators.Required(message = 'El email es requerido.'),\n\t\t\t\t\t\t\tvalidators.Email(message='Ingre un email valido.'),\n\t\t\t\t\t\t\tvalidators.length(min=4, max=25, message='Ingrese un email valido.') ])\n\tpassword = PasswordField('Password', [validators.Required(message='El password es requerido')])\n\thoneypot = HiddenField(\"\",[ length_honeypot ])\n\n\tdef validate_username(form, field):\n\t \tusername = field.data\n\t \tuser = User.query.filter_by(username = username).first()\n\t \tif user is not None:\n\t \t\traise validators.ValidationError('El username ya se encuentra registrado!')\n\nclass CreateArticleForm(Form):\n\tpass"
},
{
"alpha_fraction": 0.6943972706794739,
"alphanum_fraction": 0.6943972706794739,
"avg_line_length": 27.095237731933594,
"blob_id": "efecc52e95c78d63c68d6d0cbd9d4f0ae6ec583b",
"content_id": "24ea4494114939bea412114a6719f4164430af33",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 589,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 21,
"path": "/common/advanced_message.py",
"repo_name": "eduardogpg/important_people",
"src_encoding": "UTF-8",
"text": "class AdvancedMessage(object):\n\tmessages = []\n\n\tclass Message():\n\t\tdef __init__(self, message = '', type_message ='', identifier=''):\n\t\t\tself.message = message\n\t\t\tself.type_message = type_message\n\t\t\tself.identifier = identifier\n\t\t\tself.create_pre_idetifier()\n\n\t\tdef create_pre_idetifier(self):\n\t\t\tself.pre_idetifier = \"alert {}\".format(self.identifier)\n\n\t@classmethod\n\tdef add(cls, message ='', type_message ='', identifier = ''):\n\t\tnew_message = cls.Message(message, type_message, identifier)\n\t\tcls.messages.append(new_message)\n\n\t@classmethod\n\tdef get_messages(cls):\n\t\treturn cls.messages"
},
{
"alpha_fraction": 0.7291842103004456,
"alphanum_fraction": 0.7367535829544067,
"avg_line_length": 28.725000381469727,
"blob_id": "0d31dbf9db6799b2ed1edb8ba8935c670ff59134",
"content_id": "470ef683921f059a10d862c336fb44c4bc26ab45",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1189,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 40,
"path": "/models.py",
"repo_name": "eduardogpg/important_people",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom flask_sqlalchemy import SQLAlchemy\nfrom werkzeug.security import generate_password_hash\nfrom werkzeug.security import check_password_hash\n\nimport datetime\n\ndb = SQLAlchemy()\n\nclass User(db.Model):\n\t__tablename__ = 'users'\n\n\tid = db.Column(db.Integer, primary_key=True)\n\tusername = db.Column(db.String(25), unique=True)\n\temail = db.Column(db.String(30))\n\tpassword = db.Column(db.String(66))\n\tcreated_date = db.Column(db.DateTime, default=datetime.datetime.now)\n\tupdated_date = db.Column(db.DateTime, default=datetime.datetime.now)\n\n\tdef __init__(self, username, password, email):\n\t\tself.username = username\n\t\tself.password = self.__create_pasword(password)\n\t\tself.email = email\n\n\tdef __create_pasword(self, password):\n\t\treturn generate_password_hash(password)\n\n\tdef verify_password(self, password):\n\t\treturn check_password_hash(self.password, password)\n\n\nclass Article(db.Model):\n\t__tablename__ = 'articles'\n\n\tid = db.Column(db.Integer, primary_key=True)\n\ttitle = db.Column(db.String(50))\n\tcontent = db.Column(db.Text)\n\tcreated_date = db.Column(db.DateTime, default=datetime.datetime.now)\n\tupdated_date = db.Column(db.DateTime, default=datetime.datetime.now)\n"
},
{
"alpha_fraction": 0.7282487750053406,
"alphanum_fraction": 0.7286190390586853,
"avg_line_length": 26.272727966308594,
"blob_id": "0f5e0bdc1ce6d2f044ed70d1ebc209d093058cfb",
"content_id": "792eea50de75d6f49d308b20c0d681ec4dfc80a4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2701,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 99,
"path": "/manage.py",
"repo_name": "eduardogpg/important_people",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n__author__ = 'Eduardo Ismael García Pérez'\n__contact__ = '@eduardo_gpg'\n\nfrom flask import Flask\nfrom flask import render_template\nfrom flask import request\nfrom flask import session\nfrom flask import redirect\nfrom flask import url_for\nfrom flask import flash\nfrom flask import Markup\n\nfrom flask_wtf.csrf import CsrfProtect\n\nfrom config import DevelopmentConfig\nfrom forms import LoginForm\nfrom forms import CreateForm\n\nfrom models import db as database\nfrom models import User\n\nfrom common import AdvancedMessage\n\napp = Flask(__name__)\napp.config.from_object(DevelopmentConfig)\ncsrf = CsrfProtect()\n\[email protected]_request\ndef after_request(response):\n\treturn response\n\ndef create_session(username, user_id):\n\tsession['username'] = username\n\tsession['id'] = user_id\n\ndef success_authentication(request, user):\n\tcreate_session(user.username, user.id)\n\tsuccess_message = 'Bienvenido a la plataforma {}'.format(user.username)\n\tflash(success_message)\n\treturn redirect(url_for('dashboard'))\n\[email protected]('/', methods = ['GET'])\ndef index():\n\treturn render_template('index.html')\n\[email protected]('/user/new', methods = ['GET', 'POST'])\ndef user_new():\n\tcreate_form = CreateForm(request.form)\n\tif request.method == 'POST' and create_form.validate():\n\t\tusername = create_form.username.data\n\t\tpassword = create_form.password.data\n\t\temail = create_form.email.data\n\n\t\tuser = User(username, password, email)\n\n\t\tdatabase.session.add(user)\n\t\tdatabase.session.commit()\n\t\treturn success_authentication(request,user)\n\n\treturn render_template('user/new.html', form = create_form)\n\[email protected]('/login', methods = ['GET','POST'])\ndef login():\n\tlogin_form = LoginForm(request.form)\n\tif request.method == 'POST' and login_form.validate():\n\t\tusername = login_form.username.data\n\t\tpassword = login_form.password.data\n\t\t\n\t\tuser = 
User.query.filter_by(username = username).first()\n\t\tif user is not None and user.verify_password(password):\n\t\t\treturn success_authentication(request, user)\n\t\telse:\n\t\t\terror_message = 'Usuario o password incorrectos.'\n\t\t\tAdvancedMessage.add(message = error_message, identifier = 'alert-danger')\n\n\treturn render_template('login.html', form = login_form, messages = AdvancedMessage.get_messages())\n\[email protected]('/logout', methods = ['GET'])\ndef logout():\n\tsession.pop('username', None)\n\treturn redirect(url_for('login'))\n\[email protected]('/dashboard', methods = ['GET'])\ndef dashboard():\n\tusername = session['username']\n\tis_authenticated = True\n\treturn render_template('user/dashboard.html', username = username, is_authenticated = is_authenticated)\n\nif __name__ == '__main__':\n\tcsrf.init_app(app)\n\tdatabase.init_app(app)\n\n\twith app.app_context():\n\t\tdatabase.create_all()\n\n\tapp.run()\n\n"
}
] | 5 |
weiss1217/hideTwi | https://github.com/weiss1217/hideTwi | b0497b0d8573aa1e3f32dc0ac4709bed5c0f4c27 | b199d752d94cef954c1ab99950fb67ccb5ef8142 | 5d517b6d59071d1b1e593ec1c151c71c9d160962 | refs/heads/master | 2023-02-16T06:04:30.985634 | 2020-12-25T06:21:38 | 2020-12-25T06:21:38 | 324,093,275 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.43661972880363464,
"alphanum_fraction": 0.6478873491287231,
"avg_line_length": 15.75,
"blob_id": "13fea360cbd9cc45cc05f5daefb94c0e785ef32b",
"content_id": "d674b71fb254e80bacf75ce022deec53adf8e7e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 71,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 4,
"path": "/requirements.txt",
"repo_name": "weiss1217/hideTwi",
"src_encoding": "UTF-8",
"text": "requests==2.22.0\r\nrequests_oauthlib==1.3.0\r\nPyQt5==5.15.2\r\nsip==5.5.0\r\n"
},
{
"alpha_fraction": 0.5185661315917969,
"alphanum_fraction": 0.5370679497718811,
"avg_line_length": 31.331905364990234,
"blob_id": "f27450c5df7ad1b14236bcaae307b8ab828a8992",
"content_id": "4684cc36d8c5d9f01ff4a6c82e05990dc34f8a61",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16732,
"license_type": "no_license",
"max_line_length": 173,
"num_lines": 467,
"path": "/hideTwi.py",
"repo_name": "weiss1217/hideTwi",
"src_encoding": "UTF-8",
"text": "#-------------------------------------------------------------------------------\r\n# Name: hideTwi\r\n# Purpose:\r\n#\r\n# Author: T\r\n#\r\n# Created: 24/12/2020\r\n# Copyright: (c) T 2020\r\n# Licence: <your licence>\r\n#-------------------------------------------------------------------------------\r\n\r\nimport json, config #標準のjsonモジュールとconfig.pyの読み込み\r\nfrom requests_oauthlib import OAuth1Session #OAuthのライブラリの読み込み\r\nfrom urllib.parse import parse_qsl\r\nimport requests\r\nimport codecs\r\nimport os\r\nimport sys\r\nfrom time import sleep\r\nimport urllib.request as urlreq\r\nimport datetime\r\nimport threading\r\nimport base64\r\nimport webbrowser\r\n\r\nfrom PyQt5.QtCore import *\r\nfrom PyQt5.QtWidgets import *\r\nimport sip\r\nimport ast\r\n\r\n#アクセスURLのエンドポイント設定\r\nurl1 = \"https://api.twitter.com/1.1/statuses/home_timeline.json\" #タイムライン取得エンドポイント\r\nurl2 = \"https://api.twitter.com/1.1/statuses/update.json\" #ツイートポストエンドポイント\r\nurl3 = \"https://api.twitter.com/1.1/favorites/create.json\"\r\nurl4 = \"https://upload.twitter.com/1.1/media/upload.json\" #画像投稿\r\nurl5 = \"https://api.twitter.com/oauth/request_token\"\r\nurl6 = \"https://api.twitter.com/oauth/authenticate\"\r\nurl7 = \"https://api.twitter.com/oauth/access_token\"\r\n\r\n#httpリクエストのための設定\r\nCK = \"\"\r\nCS = \"\"\r\nAT = \"\"\r\nATS = \"\"\r\ntwitter = \"\"\r\n\r\nimage_list = []\r\ndelete_index = []\r\nimage_num = 0\r\n\r\nalpha_rate = config.MAIN_ALPHA\r\nimage_alpha_rate = config.IMAGE_ALPHA\r\n\r\nclass ImageWindow(QWidget):\r\n def __init__(self, parent=None):\r\n super(ImageWindow, self).__init__(parent)\r\n\r\n #メインウィンドウの設定\r\n self.w = 1000\r\n self.h = 480\r\n self.resize(self.w, self.h)\r\n self.setMinimumSize(self.w/2, self.h/2)\r\n self.widthFactor = 1\r\n self.heightFactor = 1\r\n self.setWindowTitle('画像一覧')\r\n self.setStyleSheet(\"background-color: \" + config.IMAGE_COLOR + \";\")\r\n self.setWindowOpacity(image_alpha_rate)\r\n self.label_list = []\r\n self.button_list 
= []\r\n self.image_display()\r\n\r\n def image_display(self):\r\n if len(image_list) == 0:\r\n return\r\n\r\n for i in range(len(image_list)):\r\n #Tweetラベルの追加\r\n self.label_list.append(QLabel(self))\r\n self.label_list[i].move(50 , 40 * (i + 1))\r\n self.label_list[i].setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">' + image_list[i] + '</font></p>')\r\n\r\n #削除ボタンの追加\r\n self.button_list.append(QPushButton('削除', self))\r\n\r\n if i == 0:\r\n self.button_list[i].clicked.connect(lambda: self.delete_image(0))\r\n elif i == 1:\r\n self.button_list[i].clicked.connect(lambda: self.delete_image(1))\r\n elif i == 2:\r\n self.button_list[i].clicked.connect(lambda: self.delete_image(2))\r\n elif i == 3:\r\n self.button_list[i].clicked.connect(lambda: self.delete_image(3))\r\n\r\n self.button_list[i].resize(100, 30)\r\n self.button_list[i].setStyleSheet(\"background-color: #FFFFFF;\")\r\n self.button_list[i].move(900, 40 * (i + 1))\r\n\r\n\r\n\r\n def delete_image(self, index:int):\r\n global image_num\r\n global image_list\r\n print(index)\r\n\r\n minus_index = 0\r\n\r\n if index == 0:\r\n minus_index = 0\r\n\r\n elif index == 1:\r\n if 0 in delete_index:\r\n minus_index += 1\r\n\r\n elif index == 2:\r\n if 0 in delete_index:\r\n minus_index += 1\r\n\r\n if 1 in delete_index:\r\n minus_index += 1\r\n\r\n elif index == 3:\r\n if 0 in delete_index:\r\n minus_index += 1\r\n\r\n if 1 in delete_index:\r\n minus_index += 1\r\n\r\n if 2 in delete_index:\r\n minus_index += 1\r\n\r\n image_list.pop(index - minus_index )\r\n delete_index.append(index)\r\n image_num -= 1\r\n\r\n self.label_list[index].hide()\r\n self.button_list[index].hide()\r\n\r\n main_window.update_image_num()\r\n\r\nclass MainWindow(QWidget):\r\n progressChanged = pyqtSignal(int)\r\n def __init__(self, parent=None):\r\n super(MainWindow, self).__init__(parent)\r\n\r\n self.get_key()\r\n self.oauth()\r\n\r\n #メインウィンドウの設定\r\n self.w = 1280\r\n self.h = 300\r\n self.resize(self.w, self.h)\r\n 
self.setMinimumSize(self.w/2, self.h/2)\r\n self.widthFactor = 1\r\n self.heightFactor = 1\r\n self.setWindowTitle('ついったーするやつ')\r\n self.setStyleSheet(\"background-color: \" + config.IMAGE_COLOR + \";\")\r\n self.setWindowOpacity(alpha_rate)\r\n\r\n #ツイート関連の表示ウィジェットの設定\r\n self.tweet_init()\r\n\r\n #ハッシュタグ保存機能の表示ウィジェットの設定\r\n self.hash_init()\r\n\r\n #透過率変更機能ウィジェットの設定\r\n self.alpha_change_init()\r\n\r\n def get_key(self):\r\n global CK\r\n global CS\r\n self.get_response = requests.get('https://mythos.pythonanywhere.com/twitter/request_key')\r\n key_token = ast.literal_eval(self.get_response.content.decode(\"utf-8\"))\r\n\r\n CK = key_token[\"CK\"]\r\n CS = key_token[\"CS\"]\r\n\r\n def oauth(self):\r\n\r\n self.request_response = requests.get('https://mythos.pythonanywhere.com/twitter/request_token?oauth_callback=https://mythos.pythonanywhere.com/twitter/access_token')\r\n request_token = ast.literal_eval(self.request_response.content.decode(\"utf-8\"))\r\n authenticate_endpoint = \"https://api.twitter.com/oauth/authenticate?oauth_token=\" + request_token[\"oauth_token\"]\r\n\r\n webbrowser.open(authenticate_endpoint)\r\n\r\n def get_AT(self):\r\n global AT\r\n global ATS\r\n self.oauth_response = requests.get('https://mythos.pythonanywhere.com/twitter/oauth')\r\n request_token = ast.literal_eval(self.oauth_response.content.decode(\"utf-8\"))\r\n AT = request_token[\"AT\"]\r\n ATS = request_token[\"ATS\"]\r\n\r\n def tweet_init(self):\r\n #Tweetラベルの追加\r\n self.lbl = QLabel(self)\r\n self.lbl.move(50, 10)\r\n self.lbl.setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">呟く内容を書けよ</font></p>')\r\n\r\n # ツイートTextBoxの追加\r\n self.textbox = QTextEdit(self)\r\n self.textbox.move(40, 40)\r\n self.textbox.setStyleSheet(\"background-color: #FFFFFF;\")\r\n\r\n # ツイートボタンの追加\r\n self.tweetbutton = QPushButton('tweet', self)\r\n self.tweetbutton.clicked.connect(self.tweet)\r\n self.tweetbutton.resize(100, 30)\r\n self.tweetbutton.setStyleSheet(\"background-color: 
#FFFFFF;\")\r\n\r\n # 画像添付ボタンの追加\r\n self.imagebutton = QPushButton('画像添付', self)\r\n self.imagebutton.clicked.connect(self.add_image)\r\n self.imagebutton.resize(100, 30)\r\n self.imagebutton.setStyleSheet(\"background-color: #FFFFFF;\")\r\n\r\n # 添付画像ラベルの追加\r\n self.imagelbl = QLabel(self)\r\n self.imagelbl.move(50, 125)\r\n self.imagelbl.setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">添付画像数 : ' + str(image_num) + '</font></p>')\r\n\r\n # 画像一覧ボタンの追加\r\n self.listbutton = QPushButton('画像一覧', self)\r\n self.listbutton.clicked.connect(self.list_image)\r\n self.listbutton.resize(100, 30)\r\n self.listbutton.move(180, 118)\r\n self.listbutton.setStyleSheet(\"background-color: #FFFFFF;\")\r\n\r\n\r\n def hash_init(self):\r\n #ハッシュタグラベルの追加\r\n self.hashlbl = QLabel(self)\r\n self.hashlbl.move(50, 170)\r\n self.hashlbl.setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">保存したい画像のハッシュタグを書けよ</font></p>')\r\n\r\n #保存時ふぁぼ機能チェックボックスの追加\r\n self.hashcheckbox = QCheckBox(\"ふぁぼりてぇCheckBox\", self)\r\n self.hashcheckbox.move(340, 170)\r\n self.hashcheckbox.setChecked(False)\r\n\r\n # ハッシュタグTextBoxの追加\r\n self.hashbox = QLineEdit(self)\r\n self.hashbox.move(40, 200)\r\n self.hashbox.setStyleSheet(\"background-color: #FFFFFF;\")\r\n\r\n # 保存ボタンの追加\r\n self.savebutton = QPushButton('保存', self)\r\n self.savebutton.clicked.connect(self.save_hash)\r\n self.savebutton.resize(100, 30)\r\n self.savebutton.setStyleSheet(\"background-color: #FFFFFF;\")\r\n\r\n #保存件数表示ラベルの追加\r\n self.savelbl = QLabel(self)\r\n self.savelbl.move(50, 240)\r\n self.savelbl.setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">保存件数 : </font></p>')\r\n self.savelbl.setVisible(False);\r\n\r\n self.progressChanged.connect(self.visible_hash)\r\n\r\n def visible_hash(self, count):\r\n self.savelbl.setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">保存件数 :' + str(count) + ' 件 </font></p>')\r\n self.savelbl.setVisible(True);\r\n\r\n 
t=threading.Thread(target=self.invisible_hash)\r\n t.start()\r\n\r\n def invisible_hash(self):\r\n sleep(5)\r\n self.savelbl.setVisible(False);\r\n\r\n def alpha_change_init(self):\r\n self.slider = QSlider(Qt.Horizontal, self)\r\n self.slider.setFocusPolicy(Qt.NoFocus)\r\n self.slider.valueChanged[int].connect(self.alpha_change)\r\n\r\n def alpha_change(self, value):\r\n global alpha_rate\r\n alpha_rate = 0.2 + value / 100 * 0.8\r\n image_alpha_rate = 0.2 + value / 100 * 0.8\r\n self.setWindowOpacity(alpha_rate)\r\n\r\n def resizeEvent(self, event):\r\n self.widthFactor = self.rect().width() / 1280\r\n self.heightFactor = self.rect().height()/ 300\r\n\r\n #ツイート機能ウィジェットの自動調整\r\n self.textbox.resize(self.w*self.widthFactor*0.85, 70)\r\n self.tweetbutton.move(40 + 30 + self.w*self.widthFactor*0.85, 80)\r\n self.imagebutton.move(40 + 30 + self.w*self.widthFactor*0.85, 40)\r\n\r\n #ハッシュタグ機能ウィジェットの自動調整\r\n self.hashbox.resize(self.w*self.widthFactor*0.85,30)\r\n self.savebutton.move(40 + 30 + self.w*self.widthFactor*0.85, 200)\r\n\r\n #透過率調整つまみの自動調整\r\n self.slider.move(self.w*self.widthFactor - 130, self.h*self.heightFactor - 40)\r\n\r\n super(MainWindow, self).resizeEvent(event)\r\n\r\n def tweet(self):\r\n global image_num\r\n global image_list\r\n image_res_list = []\r\n media_id_list = []\r\n self.get_AT()\r\n twitter = OAuth1Session(CK, CS, AT, ATS) #認証処理\r\n tweet = self.textbox.toPlainText()\r\n\r\n if image_num != 0:\r\n for i in range(len(image_list)):\r\n\r\n b64 = base64.encodestring(open(image_list[i], 'rb').read())\r\n\r\n #画像投稿\r\n files = {\"media\" : b64}\r\n res_image = twitter.post(url4, params = files) #post送信\r\n\r\n if res_image.status_code != 200:\r\n print (\"画像をアップロードできませんでした。: \", res_image.status_code, res_image.text )\r\n else:\r\n image_res_list.append(res_image)\r\n\r\n\r\n for i in range(len(image_res_list)):\r\n media_id_list.append(json.loads(image_res_list[i].text)['media_id'])\r\n\r\n if image_num != 0:\r\n if len(image_res_list) == 
0:\r\n print(\"画像投稿失敗\")\r\n image_list\r\n return\r\n else:\r\n params = {\"status\" : tweet, \"media_ids\": media_id_list}\r\n\r\n else:\r\n params = {\"status\" : tweet}\r\n\r\n res = twitter.post(url2, params = params) #post送信\r\n\r\n if res.status_code == 200: #正常投稿出来た場合\r\n print(\"tweet success\")\r\n self.textbox.setText(\"\")\r\n else: #正常投稿出来なかった場合\r\n print(\"Failed. : %d\"% res.status_code)\r\n\r\n image_list.clear()\r\n image_num = 0\r\n self.update_image_num()\r\n\r\n def add_image(self):\r\n\r\n global image_num\r\n\r\n if image_num > 3:\r\n return\r\n\r\n path = os.getcwd()\r\n\r\n\r\n input_image_path = QFileDialog.getOpenFileName(\r\n QFileDialog(), caption=\"入力画像\", directory=path, filter=\"*.png *.jpg\")[0]\r\n\r\n if input_image_path != \"\":\r\n image_list.append(input_image_path)\r\n image_num += 1\r\n self.imagelbl.setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">添付画像数 : ' + str(image_num) + '</font></p>')\r\n\r\n if image_num > 3:\r\n self.imagelbl.setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">添付画像数 : ' + str(image_num) + ' (MAX) </font></p>')\r\n\r\n def list_image(self):\r\n image_window = ImageWindow()\r\n image_window.show()\r\n\r\n def update_image_num(self):\r\n self.imagelbl.setText('<p><font size=\"4\" color=\"' + config.PHONT_COLOR + '\">添付画像数 : ' + str(image_num) + '</font></p>')\r\n\r\n\r\n #ハッシュタグを自動保存する\r\n def save_hash(self):\r\n self.get_AT()\r\n twitter = OAuth1Session(CK, CS, AT, ATS)\r\n hash = self.hashbox.text()\r\n hash = hash.strip()\r\n t=threading.Thread(target=self.save_hash_thread,args = (hash,))\r\n t.start()\r\n\r\n def save_hash_thread(self, hash):\r\n if hash[:1] != \"#\":\r\n hash = \"#\" + hash\r\n\r\n query = hash + ' filter:images min_faves:0 exclude:retweets'\r\n\r\n hash = hash[1:]\r\n\r\n if config.IMAGE_DIRECTORY == \"\":\r\n save_dir = \"./\" + hash\r\n else:\r\n if not os.path.exists(config.IMAGE_DIRECTORY):\r\n print(\"指定したディレクトリは存在しません。\")\r\n return\r\n 
else:\r\n save_dir = config.IMAGE_DIRECTORY + \"\\\\\" + hash\r\n\r\n if not os.path.exists(save_dir):\r\n os.makedirs(save_dir)\r\n\r\n params = {\"q\": query, \"count\": 200}\r\n\r\n url = 'https://api.twitter.com/1.1/search/tweets.json'\r\n twitter = OAuth1Session(CK, CS, AT, ATS) #認証処理\r\n req = twitter.get(url, params=params)\r\n\r\n result = []\r\n if req.status_code == 200:\r\n tweets = json.loads(req.text)\r\n result = tweets['statuses']\r\n\r\n else:\r\n print(\"ERROR!: %d\" % req.status_code)\r\n return;\r\n\r\n save_count = 0\r\n for tweet in result:\r\n name = tweet['user']['screen_name']\r\n date = tweet['created_at']\r\n date = date.replace(\" +0000\",\"\")\r\n date = date.replace(\" \",\"-\")\r\n date = date.replace(\":\",\".\")\r\n count = 0\r\n try:\r\n media_list = tweet['extended_entities']['media']\r\n for img in media_list:\r\n count += 1\r\n img_url = img['media_url']\r\n path = save_dir + \"/[\" + str(name) + \"]_\" + str(date) + \"_\" + str(count) + \".jpg\"\r\n print(path)\r\n if os.path.exists(path):\r\n print(\"重複のため保存しませんでした\")\r\n else:\r\n tweet_id = tweet[\"id\"]\r\n params = {\"id\": tweet_id}\r\n #print(\"id取得\" + str(tweet_id))\r\n if self.hashcheckbox.isChecked():\r\n res = twitter.post(url3, params=params) #ふぁぼ\r\n if res.status_code == 200: #正常投稿出来た場合\r\n print(\"Favorite Success.\")\r\n else: #正常投稿出来なかった場合\r\n print(\"Failed. : %d\"% res.status_code)\r\n\r\n urlreq.urlretrieve(img_url, path)\r\n print(\"画像を保存しました\", img_url)\r\n save_count += 1\r\n print(\"-・\"*30)\r\n except Exception as e:\r\n print(\"画像を取得できませんでした\")\r\n print(e)\r\n print(\"-・\"*30)\r\n\r\n self.progressChanged.emit(save_count)\r\n\r\n\r\nif __name__ == '__main__':\r\n\r\n app = QApplication(sys.argv)\r\n main_window = MainWindow()\r\n main_window.show()\r\n sys.exit(app.exec_())\r\n"
},
{
"alpha_fraction": 0.6878849864006042,
"alphanum_fraction": 0.6981519460678101,
"avg_line_length": 11.55555534362793,
"blob_id": "4dddc306077ad4576ddb173a55c7f9f5be2e8981",
"content_id": "f5faef7d391574fb0945454f8366576e60f1dd7a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 927,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 36,
"path": "/README.md",
"repo_name": "weiss1217/hideTwi",
"src_encoding": "SHIFT_JIS",
"text": "# ついったーするやつ(クソデカ文字)\r\n## Abstract\r\nついったーするやつです。更新頑張ります。\r\n\r\n## Environment\r\npython3.6以上\r\n\r\n## Installation\r\n```\r\npip install -r requirements.txt\r\n```\r\n\r\n## Execute\r\n```\r\npython hideTwi.py\r\n```\r\n\r\n## configuration changes\r\nconfig.pyの中身を変更してください。\r\n現状では以下を変更できます。\r\n```\r\n・テーマカラー\r\n・フォントカラー\r\n・透過率\r\n・画像保存ディレクトリ\r\n```\r\n\r\n## HOW TO USE\r\n現状での機能は下記のみとなります。\r\n```\r\n・ツイート(画像付き)\r\n・ハッシュタグ自動保存\r\n テキストボックス内にハッシュタグを入力して保存を押すと最大200枚まで自動的に保存を行います。\r\n ふぁぼりてぇCheckBoxにチェックを入れると保存と同時にふぁぼります。\r\n・スライダによる画面透過率の変更\r\n```\n"
},
{
"alpha_fraction": 0.602787435054779,
"alphanum_fraction": 0.6550522446632385,
"avg_line_length": 17.266666412353516,
"blob_id": "028e53ca848bd13d156443ee1354437c3f36479b",
"content_id": "0e748a52abc00ee5a7bb07b06552cbce612f48fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 383,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 15,
"path": "/config.py",
"repo_name": "weiss1217/hideTwi",
"src_encoding": "UTF-8",
"text": "#メインウィンドウ透過率\r\nMAIN_ALPHA = 0.5\r\n\r\n#画像ウィンドウ透過率\r\nIMAGE_ALPHA = 0.5\r\n\r\n#テーマカラー(color code 若しくは color name)\r\n#IMAGE_COLOR = \"aliceblue\" # or \"#f0f8ff\"\r\nIMAGE_COLOR = \"darkgray\" # or \"#a9a9a9\"\r\n\r\n#フォントカラー\r\nPHONT_COLOR = \"#000000\"\r\n\r\n#画像保存ディレクトリ\r\nIMAGE_DIRECTORY = \"\" # or \"D:\\hoge\\huga\\photo\""
}
] | 4 |
shakkeelbhat/HSVmask | https://github.com/shakkeelbhat/HSVmask | f5ed36f90fd67ba158503c9c09a118352d248632 | af113435cc792599109152cd4da1b1ce429cfbb9 | b59cbf25a206d743827faf9a63e1746bfe4875bd | refs/heads/master | 2022-11-23T13:43:16.951143 | 2020-08-02T07:55:22 | 2020-08-02T07:55:22 | 284,417,137 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7954545617103577,
"alphanum_fraction": 0.7954545617103577,
"avg_line_length": 21,
"blob_id": "99c7f4d7c9dc54ff1462337738023b47414a9801",
"content_id": "796284f2c65cb1addc1c04873daefb4075a38088",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 44,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 2,
"path": "/README.md",
"repo_name": "shakkeelbhat/HSVmask",
"src_encoding": "UTF-8",
"text": "# HSVmask\nMask to filter colors from images\n"
},
{
"alpha_fraction": 0.5839112401008606,
"alphanum_fraction": 0.6816921234130859,
"avg_line_length": 29.04166603088379,
"blob_id": "59fb6f7a8533511c6d36f3985cd361e38b404165",
"content_id": "e22405fe353205f5bb0a04f7122f74987a578310",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1442,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 48,
"path": "/hsvMask.py",
"repo_name": "shakkeelbhat/HSVmask",
"src_encoding": "UTF-8",
"text": "import cv2 \nimport numpy as np\nimport math\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import hsv_to_rgb\n\nimg = cv2.imread(\"input_image.jpg\")\n# img = np.flip(img,axis=1)\nimgHsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)\n\nsensitivity=20\nlower_green = np.array([60 - sensitivity, 100, 100])#(50, 100, 60)#'hsl(%d, %d%%, %d%%)' % (120, 50, 25) #hsla(120, 50%, 25%, 1)\nupper_green = np.array([60 + sensitivity, 255, 255])\nmask1 = cv2.inRange(imgHsv, lower_green, upper_green)\n\n\n# lower_green2 = np.array([72,52,72])\n# upper_green2= np.array([97,255,255])\n# mask2 = cv2.inRange(imgHsv, lower_green2, upper_green2)\n# print(mask2[10])\n# cv2.imwrite('/home/salah/Desktop/1003/mask2.jpeg',mask2)\n\n# mask1 = mask1+mask2\n# print(mask1[10])\n# mask2 = cv2.morphologyEx(mask1, cv2.MORPH_OPEN, np.ones((,2),np.uint8))\nmaskx = cv2.morphologyEx(mask1, cv2.MORPH_ERODE, np.ones((2,2),np.uint8))\nmask2 = cv2.morphologyEx(mask1, cv2.MORPH_DILATE, np.ones((4,4),np.uint8))\n# mask2 = cv2.morphologyEx(mask1, cv2.MORPH_CLOSE,np.ones((3,3),np.uint8))\n\nmask3 = mask1+mask2+maskx\n# print('mask2',mask2[10])\n# mask3 = cv2.bitwise_not(mask2)\n\n# print(mask3[10])\ndef myfunc(img,mask):\n\tgreen = np.zeros_like(img, np.uint8)\n\tfor i,dim1 in enumerate(mask):\n\t\tfor j,dim2 in enumerate(dim1):\n\t\t\tif mask[i][j]>0:\n\t\t\t\tgreen[i][j]=np.array([255,255,255])\n\t\t\telse:\n\t\t\t\tgreen[i][j]=img[i][j]\n\treturn green\nres1=myfunc(img,mask3)\n\n\n\ncv2.imwrite('output_image.jpeg',res1)\n"
}
] | 2 |
ArielCoulson/FlowersDatabase | https://github.com/ArielCoulson/FlowersDatabase | 906ad536641d32f87997d9121b51bcf567bd7ec5 | 10f6121fe4fa38982b7fe6ba3acbd25ec4330db3 | 24f47a626d6197515be80fb74a0d12b46d47ae68 | refs/heads/master | 2020-04-09T19:40:13.566217 | 2019-01-25T00:23:50 | 2019-01-25T00:23:50 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5765414834022522,
"alphanum_fraction": 0.5776045322418213,
"avg_line_length": 25.885713577270508,
"blob_id": "23cf379080eb10748d935e0145269cb3daa604a7",
"content_id": "778f0b045ca27256ef4065517a4163165a3fdf51",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2822,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 105,
"path": "/server.py",
"repo_name": "ArielCoulson/FlowersDatabase",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template, request\nimport sqlite3 as sql\napp = Flask(__name__)\n\[email protected]('/')\ndef list():\n con = sql.connect(\"flowers.db\")\n con.row_factory = sql.Row\n \n cur = con.cursor()\n cur.execute(\"select * from FLOWERS\")\n \n rows = cur.fetchall();\n return render_template(\"list.html\",rows = rows)\n\[email protected]('/enternew')\ndef new_flower():\n con = sql.connect(\"flowers.db\")\n con.row_factory = sql.Row\n \n cur = con.cursor()\n cur.execute(\"select * from FLOWERS\")\n rows = cur.fetchall();\n\n return render_template('insertFlower.html', rows=rows)\n\[email protected]('/flowerSight/', methods=['GET', 'POST'])\ndef flowerSight():\n\n commonName = request.form['COMNAME']\n con = sql.connect(\"flowers.db\")\n con.row_factory = sql.Row\n\n \n cur = con.cursor()\n cur.execute(\"SELECT * FROM SIGHTINGS WHERE NAME = ? order by SIGHTED desc LIMIT 10\", (commonName,))\n \n rows = cur.fetchall();\n return render_template('recentSightings.html', rows=rows)\n\[email protected]('/editFlower/', methods=['GET', 'POST'])\ndef editFlower():\n\n commonName = request.form['COMNAME']\n con = sql.connect(\"flowers.db\")\n con.row_factory = sql.Row\n\n \n cur = con.cursor()\n cur.execute(\"SELECT * FROM FLOWERS WHERE COMNAME = ?\", (commonName,))\n \n rows = cur.fetchall();\n return render_template('edit.html', rows=rows)\n\[email protected]('/addrec/',methods = ['POST', 'GET'])\ndef addrec():\n if request.method == 'POST':\n try:\n fname = request.form['FNAME']\n pname = request.form['PNAME']\n l = request.form['LOCATION']\n s = request.form['SIGHTED']\n \n with sql.connect(\"flowers.db\") as con:\n cur = con.cursor()\n \n cur.execute(\"INSERT INTO SIGHTINGS (NAME,PERSON,LOCATION,SIGHTED) VALUES (?,?,?,?)\",(fname,pname,l,s) )\n\n con.commit()\n msg = \"Record successfully added\"\n except:\n con.rollback()\n msg = \"error in insert operation\"\n \n finally:\n return render_template(\"result.html\",msg = msg)\n 
con.close()\n\n\n\[email protected]('/update_entry/', methods = ['POST', 'GET'])\ndef update_entry():\n if request.method == 'POST':\n try:\n o = request.form['original']\n g = request.form['genus']\n s = request.form['species']\n c = request.form['cname']\n\n with sql.connect(\"flowers.db\") as con:\n cur = con.cursor()\n \n cur.execute(\"UPDATE FLOWERS SET GENUS=?, SPECIES=?, COMNAME=? WHERE COMNAME = ?\",(g,s,c,o))\n msg = \"Successfully added\"\n \n except:\n con.rollback()\n msg = \"Error in the Addition\"\n finally: \n return render_template(\"result.html\", msg=msg)\n con.close()\n\n\nif __name__ == '__main__':\n app.run(debug = True)"
}
] | 1 |
vishalgupta84/Programmes | https://github.com/vishalgupta84/Programmes | 8c72486777eecf47c41b30b8bd1df116f8fa7fae | ef065e478d89b7fb09f653b41feab3f356963bd5 | 21931fd388f368b4f59ad2d240ebd1300390bdab | refs/heads/master | 2021-01-21T03:13:32.742459 | 2019-07-28T15:42:57 | 2019-07-28T15:42:57 | 65,014,225 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5287356376647949,
"alphanum_fraction": 0.5379310250282288,
"avg_line_length": 16.440000534057617,
"blob_id": "8db9dbcf60fbcb7d3deaebfe1d2ce3801ad44080",
"content_id": "df313d5bf93a958fcbd4cb062ce7c6a2ee623591",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 435,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 25,
"path": "/coding/codechef/DEVUGRAP.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <vector>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t;\n\tlong long int n,k,sum=0,num,rem;\n\tstd::vector<long int> number;\n\tcin >> t;\n\twhile(t--){\n\t\tsum=0;\n\t\tcin >> n >> k;\n\t\t// number.resize(n);\n\t\tfor (int i = 0; i < n; ++i){\n\t\t\tcin >> num;\n\t\t\trem= num % k;\n\t\t\tif(num>=k)\n\t\t\t\trem=min(rem,(k-rem));\n\t\t\telse\n\t\t\t\trem=(k-rem);\n\t\t\tsum = sum + rem;\n\t\t}\n\t\tcout << sum << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5003594756126404,
"alphanum_fraction": 0.5693745613098145,
"avg_line_length": 18.457143783569336,
"blob_id": "f5ca70956590f9be2c2effd75cfc828486966195",
"content_id": "697135695f4ebdee62ce04902f5cdb3f69dcc88d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1391,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 70,
"path": "/coding/interviewbit/checkLevel6_1.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint countInversion(std::vector<int> A){\r\n\tset<int> tree;\r\n\t//cout << \" hikj\";\r\n\tint ans=0;\r\n\tpair<set<int>::iterator,bool> it;\r\n\ttree.insert(A[0]);\r\n\tfor (int i = 1; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tit=tree.insert(A[i]);\r\n\t\t//int idx=it.first;\r\n\t\tint s=distance(tree.find(A[i]),tree.end());//tree.size();\r\n\t\t//cout << s;\r\n\t\tans=ans+s-1;\r\n\t\t//cout << i;\r\n\t}\r\n\treturn ans;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tvector<int> A(n);\r\n\tfor(int i=0;i<n;i++)\r\n\t\tcin >> A[i];\r\n\tint sol=countInversion(A);\r\n\tcout << sol<<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint countInversion(std::vector<int> A){\n\tset<int> tree;\n\t//cout << \" hikj\";\n\tint ans=0;\n\tpair<set<int>::iterator,bool> it;\n\ttree.insert(A[0]);\n\tfor (int i = 1; i < A.size(); ++i)\n\t{\n\t\t/* code */\n\t\tit=tree.insert(A[i]);\n\t\t//int idx=it.first;\n\t\tint s=distance(tree.find(A[i]),tree.end());//tree.size();\n\t\t//cout << s;\n\t\tans=ans+s-1;\n\t\t//cout << i;\n\t}\n\treturn ans;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tint sol=countInversion(A);\n\tcout << sol<<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.4336569607257843,
"alphanum_fraction": 0.4660194218158722,
"avg_line_length": 11.916666984558105,
"blob_id": "559cae02b30c63a45eeff01e9e9dc7d14a9d1fbe",
"content_id": "10c5dba8a8fcafc21a2ab0808885d9e8f8f22998",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 309,
"license_type": "no_license",
"max_line_length": 20,
"num_lines": 24,
"path": "/coding/codechef/valid-tri.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\nusing namespace std;\nint main(){\n\tint t;\n\tcin >> t;\n\twhile(t--){\n\t\tint i,sum=0;\n\t\tfor(i=0;i<3;i++){\n\t\t\tint j;\n\t\t\tcin >> j;\n\t\t\tif(j==0){\n\t\t\t\tcout << \"NO\\n\";\n\t\t\t\tsum=-1;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tsum=sum+j;\n\t\t}\n\t\tif(sum==180)\n\t\t\tcout << \"YES\\n\";\n\t\telse if(sum!=-1)\n\t\t\tcout << \"NO\\n\";\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4409148693084717,
"alphanum_fraction": 0.5209656953811646,
"avg_line_length": 20.44444465637207,
"blob_id": "aa6e519594b1108aa01775ded6d7cb828e8edf8c",
"content_id": "98ccf57648da0b135170e3e98eb85aa2f2e4049c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1574,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 72,
"path": "/coding/interviewbit/DP/coinSum.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint coinSum(std::vector<int> A){\r\n\tif(A.empty())\r\n return 0;\r\n if(A.size()==1)\r\n return A[0];\r\n int table[A.size()][A.size()];\r\n\tfor(int k=0;k<A.size();k++){\r\n\t\tfor(int i=0;(i+k)<=A.size();i++){\r\n\t\t\tfor(int i=0, j=k;j<A.size();i++,j++){\r\n\t\t\t\tint x=(i+2)<j?table[i+2][j]:0;\r\n\t\t\t\tint y=(i<j-2)?table[i][j-2]:0;\r\n\t\t\t\tint z=(i+1<j-1)?table[i+1][j-1]:0;\r\n\t\t\t\tint ans=max(A[i]+min(x,z),A[j]+min(y,z));\r\n\t\t\t\ttable[i][j]=ans;\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\treturn table[0][A.size()-1];\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tvector<int> A(n);\r\n\tfor(int i=0;i<n;i++)\r\n\t\tcin >> A[i];\r\n\tint sum=coinSum(A);\r\n\tcout << sum <<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint coinSum(std::vector<int> A){\n\tif(A.empty())\n return 0;\n if(A.size()==1)\n return A[0];\n int table[A.size()][A.size()];\n\tfor(int k=0;k<A.size();k++){\n\t\tfor(int i=0;(i+k)<=A.size();i++){\n\t\t\tfor(int i=0, j=k;j<A.size();i++,j++){\n\t\t\t\tint x=(i+2)<j?table[i+2][j]:0;\n\t\t\t\tint y=(i<j-2)?table[i][j-2]:0;\n\t\t\t\tint z=(i+1<j-1)?table[i+1][j-1]:0;\n\t\t\t\tint ans=max(A[i]+min(x,z),A[j]+min(y,z));\n\t\t\t\ttable[i][j]=ans;\n\t\t\t}\n\t\t}\n\t}\n\treturn table[0][A.size()-1];\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tint sum=coinSum(A);\n\tcout << sum <<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.4793187379837036,
"alphanum_fraction": 0.5133820176124573,
"avg_line_length": 14.84615421295166,
"blob_id": "06b2023e38f99adfca7cd7566ce840e52e2a6587",
"content_id": "12c76e6ddf86c174117a5d2be5d75a14c2a12dca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 411,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 26,
"path": "/coding/codechef/CFRTEST.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n// #include <vector>\n#include <cstdlib>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t,n,num;\n\tbool day[101];\n\tcin >> t;\n\twhile(t--){\n\t\tcin >> n;\n\t\tfor (int i = 0; i < 101; ++i){\n\t\t\tday[i]=false;\n\t\t}\n\t\tfor (int i = 0; i < n; ++i){\n\t\t\tcin >> num ;\n\t\t\tday[num]=true;\n\t\t}\n\t\tnum=0;\n\t\tfor(int i=1;i<101;i++){\n\t\t\tif(day[i])\n\t\t\t\tnum++;\n\t\t}\n\t\tcout << num <<endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4780701696872711,
"alphanum_fraction": 0.4868420958518982,
"avg_line_length": 21.799999237060547,
"blob_id": "09b1c3d910613d9243cbad8bfcc3ed93f11b884c",
"content_id": "e91dbd856f890ee61b81cc16ef2714c86c9369f1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 684,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 30,
"path": "/coding/data structure/string.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<cstring>\n#include<cstdlib>\nusing namespace std;\nint main(){\n int n,i,j;\n string *str=NULL,temp;\n cout << \"enter number of strings\\n\";\n cin >> n;\n str=new string[n];\n cout << \"now enter strings\\n\";\n for(i=0;i<n;i++){\n cin >> temp;\n str[i]=temp;\n }\n for(i=0;i<n-1;i++){\n int idx=i;\n for(j=i+1;j<n;j++){\n if(str[j].length()<str[idx].length())\n idx=j;\n }\n temp=str[idx];\n str[idx]=str[i];\n str[i]=temp;\n }\n cout << \"sorted string with respect to length of each string is \\n\";\n for(i=0;i<n;i++)\n cout << str[i] << endl;\n return 0;\n}\n"
},
{
"alpha_fraction": 0.4292527735233307,
"alphanum_fraction": 0.4970853328704834,
"avg_line_length": 20.511627197265625,
"blob_id": "9ca7818e8117869876c77802a09534be6b221d48",
"content_id": "356c180ba57b3cff42913d8aaac60f2b8aa365a4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1887,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 86,
"path": "/coding/interviewbit/DP/dungeonPrincess.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint health(std::vector<std::vector<int> > A ){\r\n\tint row=A.size();\r\n int col=A[0].size();\r\n if(row==0)\r\n return 0;\r\n vector<vector<int> > need(row+1,vector<int> (col+1,INT_MAX));\r\n need[row][col-1]=1;\r\n need[row-1][col]=1;\r\n for(int i=row-1;i>=0;i--){\r\n for(int j=col-1;j>=0;j--){\r\n int temp=min(need[i+1][j],need[i][j+1])-A[i][j];\r\n temp=temp<=0?1:temp;\r\n need[i][j]=temp;\r\n }\r\n }\r\n return need[0][0];\r\n}\r\nint main(){\r\n\tint n,m;\r\n\tcin >> n;\r\n\tcin >> m;\t\r\n\tstd::vector<std::vector<int> > A(n,std::vector<int>(m));\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tfor (int j = 0; j < m; ++j)\r\n\t\t{\r\n\t\t\t/* code */\r\n\t\t\tcin >> A[i][j];\r\n\t\t}\r\n\t}\r\n\tcout << endl ;\r\n\tint sol=health(A);\r\n\tcout << sol << endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint health(std::vector<std::vector<int> > A ){\n\tint row=A.size();\n int col=A[0].size();\n if(row==0)\n return 0;\n vector<vector<int> > need(row+1,vector<int> (col+1,INT_MAX));\n need[row][col-1]=1;\n need[row-1][col]=1;\n for(int i=row-1;i>=0;i--){\n for(int j=col-1;j>=0;j--){\n int temp=min(need[i+1][j],need[i][j+1])-A[i][j];\n temp=temp<=0?1:temp;\n need[i][j]=temp;\n }\n }\n return need[0][0];\n}\nint main(){\n\tint n,m;\n\tcin >> n;\n\tcin >> m;\t\n\tstd::vector<std::vector<int> > A(n,std::vector<int>(m));\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tfor (int j = 0; j < m; ++j)\n\t\t{\n\t\t\t/* code */\n\t\t\tcin >> A[i][j];\n\t\t}\n\t}\n\tcout << endl ;\n\tint sol=health(A);\n\tcout << sol << endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5420689582824707,
"alphanum_fraction": 0.5531034469604492,
"avg_line_length": 19.742856979370117,
"blob_id": "eae0a3ab3381021dc46e8a63cf8fda7d45bc64d7",
"content_id": "dc1506488af104c6365dd3c406822d863da93105",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 725,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 35,
"path": "/coding/codechef/WDTBAM.java",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "import java.util.*;\nclass WDTBAM{\n\tpublic static void main(String[] args) {\n\t\tint t,n;\n\t\tScanner reader=new Scanner(System.in);\n\t\tt=reader.nextInt();\n\t\twhile(t>0){\n\t\t\tt--;\n\t\t\tn=reader.nextInt();\n\t\t\t// System.out.println(n);\n\t\t\tString user=reader.next();\n\t\t\tString actual=reader.next();\n\t\t\t// System.out.println(user);\n\t\t\tint [] point=new int[n+1];\n\t\t\tfor (int i=0;i<=n ;i++ ) {\n\t\t\t\tpoint[i]=reader.nextInt();\n\t\t\t}\n\t\t\tint c=0;\n\t\t\tfor (int i=0;i<n ;i++ ) {\n\t\t\t\tif(user.charAt(i)==actual.charAt(i))\n\t\t\t\t\tc++;\n\t\t\t}\n\t\t\tint mx=point[0];\n\t\t\t// System.out.println(c);\n\t\t\tfor (int i=1;i<=c ;i++ ) {\n\t\t\t\tif(mx<point[i])\n\t\t\t\t\tmx=point[i];\n\t\t\t}\n\t\t\tif(c==n)\n\t\t\t\tSystem.out.println(point[n-1]);\n\t\t\telse\n\t\t\t\tSystem.out.println(mx);\n\t\t}\n\t}\n}"
},
{
"alpha_fraction": 0.49484536051750183,
"alphanum_fraction": 0.5331369638442993,
"avg_line_length": 16.894737243652344,
"blob_id": "55705628faaac0a1a95b55615a214195e53301af",
"content_id": "8a7b13a5228f62aa5af601646c26b3a2495a58f1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 679,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 38,
"path": "/coding/interviewbit/array/add_one.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<vector>\nusing namespace std;\nstd::vector<int> add1(std::vector<int> &A){\n\tif(A.size()==0)\n\t\treturn A;\n\tA[A.size()-1]=A[A.size()-1]+1;\n\tint carry=A[A.size()-1]/10;\n\tA[A.size()-1]=A[A.size()-1]%10;\n\tfor(int i=A.size()-2;i>=0;i--){\n\t\tA[i]=A[i]+carry;\n\t\tcarry=A[i]/10;\n\t\tA[i]=A[i]%10;\n\t}\n\n\twhile(carry==0&&A[0]==0)\n\t\tA.erase(A.begin(),A.begin()+1);\n\tif(carry){\n\t\tA.insert(A.begin(),carry);\n\t}\n\treturn A;\n}\nint main(){\n\tstd::vector<int> A;\n\tint n;\n\tcin >> n;\n\tint num;\n\tfor (int i = 0; i < n; ++i){\n\t\tcin >> num;\n\t\tA.push_back(num);\n\t}\n\tvector<int> res=add1(A);\n\tfor (int i = 0; i < res.size(); ++i){\n\t\tcout << res[i] << \" \";\n\t}\n\tcout << endl;\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.3854506015777588,
"alphanum_fraction": 0.4408251941204071,
"avg_line_length": 22.8157901763916,
"blob_id": "7e38642eee9c4d1646242b79fa0b6a3e13575071",
"content_id": "cd1dd9948d8f9de2e1fe290da0057614f9ce51d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1842,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 76,
"path": "/coding/geeksforgeeks/Amazon/problem4.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nvoid printMat(vector<std::vector<int> > M ,int row,int col)\r\n{\r\n vector<int> table(row,1);\r\n for(int i=0;i<row;i++){\r\n for(int j=i+1;j<row;j++){\r\n if(M[i]==M[j])\r\n table[j]=0;\r\n }\r\n }\r\n //for(int i=0;i<row;i++)\r\n //cout << \"row= \" << table[i] << endl;\r\n for(int i=0;i<row;i++){\r\n if(table[i]){\r\n for(int j=0;j<col;j++)\r\n cout << M[i][j] << \" \";\r\n cout << \"$\";\r\n }\r\n }\r\n}\r\nint main(){\r\n int col,row;\r\n cin >> row >> col;\r\n std::vector<std::vector<int> > A(row,std::vector<int>(col));\r\n for (int i = 0; i < row; ++i)\r\n {\r\n /* code */\r\n for(int j=0;j<col;j++)\r\n cin >> A[i][j];\r\n }\r\n printMat(A,row,col);\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nvoid printMat(vector<std::vector<int> > M ,int row,int col)\n{\n vector<int> table(row,1);\n for(int i=0;i<row;i++){\n for(int j=i+1;j<row;j++){\n if(M[i]==M[j])\n table[j]=0;\n }\n }\n //for(int i=0;i<row;i++)\n //cout << \"row= \" << table[i] << endl;\n for(int i=0;i<row;i++){\n if(table[i]){\n for(int j=0;j<col;j++)\n cout << M[i][j] << \" \";\n cout << \"$\";\n }\n }\n}\nint main(){\n int col,row;\n cin >> row >> col;\n std::vector<std::vector<int> > A(row,std::vector<int>(col));\n for (int i = 0; i < row; ++i)\n {\n /* code */\n for(int j=0;j<col;j++)\n cin >> A[i][j];\n }\n printMat(A,row,col);\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.4477040767669678,
"alphanum_fraction": 0.45790815353393555,
"avg_line_length": 15.086956977844238,
"blob_id": "b58eea5307c4920e918c81c6d9611267e3808bb0",
"content_id": "71526699520764582d9d90e584089c60cb1eda12",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 784,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 46,
"path": "/coding/interviewbit/heap/distinct.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nstd::vector<int> solution(std::vector<int> A,int k){\r\n\tmap<int ,int> mp;\r\n\tstd::vector<int> result;\r\n\tif(A.size()<k)\r\n\t\treturn result;\r\n\tint c=0;\r\n\tfor (int i = 0; i < k; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tif(mp[A[i]]==0)\r\n\t\t\tc++;\r\n\t\tmp[A[i]]++;\r\n\t}\r\n\tresult.push_back(c);\r\n\tfor (int i = k; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tif(mp[A[i-k]]==1)\r\n\t\t\tc--;\r\n\t\tmp[A[i-k]]--;\r\n\t\tif(mp[A[i]]==0)\r\n\t\t\tc++;\r\n\t\tmp[A[i]]++;\r\n\t\tresult.push_back(c);\r\n\t}\r\n\treturn result;\r\n}\r\nint main(){\r\n\tint n,k;\r\n\tcin >> n>>k;\r\n\tstd::vector<int> A(n);\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */cin>> A[i];\r\n\t}\r\n\tstd::vector<int> v=solution(A,k);\r\n\tfor (int i = 0; i < v.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcout << v[i] << \" \";\r\n\t}\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.49462366104125977,
"alphanum_fraction": 0.5075268745422363,
"avg_line_length": 13.53125,
"blob_id": "a4d23df6b4c27e3b7859a0d5dbfab45632182136",
"content_id": "f2fb5f649cfc5ede0fddedd3df2a1c6371ec1060",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 465,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 32,
"path": "/coding/codechef/recipe.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<cstdlib>\nusing namespace std;\nint find_gcd(int a,int b){\n\tif(a<b)\n\t\tfind_gcd(b,a);\n\twhile(a%b!=0){\n\t\tint tmp=b;\n\t\tb=a%b;\n\t\ta=tmp;\n\t}\n\treturn b;\n}\nint main(){\n\tint t;\n\tcin >> t;\n\twhile(t--){\n\t\tint n,i;\n\t\tcin >> n;\n\t\tint arr[n];\n\t\tfor(i=0;i<n;i++)\n\t\t\tcin >> arr[i];\n\t\tint gcd=arr[0];\n\t\tfor(i=1;i<n;i++)\n\t\t\tgcd=find_gcd(arr[i],gcd);\n\t\tfor(i=0;i<n;i++)\n\t\t\tarr[i]=arr[i]/gcd;\n\t\tfor(i=0;i<n;i++)\n\t\t\tcout << arr[i] << \" \";\n\t\tcout << endl;\n\t}\n}\n"
},
{
"alpha_fraction": 0.4715750217437744,
"alphanum_fraction": 0.5610437989234924,
"avg_line_length": 15.903225898742676,
"blob_id": "84f7ac8532a546109465019459b5921e06e5b324",
"content_id": "e5a9d93f67c208d9497c3da6535da61cdcd86104",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1073,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 62,
"path": "/coding/geeksforgeeks/Amazon/problem6.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint solution(std::vector<int> A){\r\n\tint len=A.size();\r\n\tint m=0;\r\n\tfor(int k=0;k<len;k++){\r\n\t\tint sum=0;\r\n\t\tfor(int i=0;i<len;i++){\r\n\t\t\tint pos=(i+k)%len;\r\n\t\t\tsum=sum+pos*A[i];\r\n\t\t}\r\n\t\tif(sum>m)\r\n\t\t\tm=sum;\r\n\t}\r\n\treturn m;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tvector<int> A(n);\r\n\tfor(int i=0;i<n;i++)\r\n\t\tcin >> A[i];\r\n\tint sum=solution(A);\r\n\tcout << sum <<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint solution(std::vector<int> A){\n\tint len=A.size();\n\tint m=0;\n\tfor(int k=0;k<len;k++){\n\t\tint sum=0;\n\t\tfor(int i=0;i<len;i++){\n\t\t\tint pos=(i+k)%len;\n\t\t\tsum=sum+pos*A[i];\n\t\t}\n\t\tif(sum>m)\n\t\t\tm=sum;\n\t}\n\treturn m;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tint sum=solution(A);\n\tcout << sum <<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5752032399177551,
"alphanum_fraction": 0.5955284833908081,
"avg_line_length": 13.485294342041016,
"blob_id": "2c2afc459b06101bcddb0d3aa86bd455c1397a3a",
"content_id": "af461a89104bf6390dad7d20f3bb62a15e4d12a8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 984,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 68,
"path": "/coding/codechef/sum_palind.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<cstdlib>\nusing namespace std;\nclass palindrome\n{\n\tint l,r;\n\tlong long unsigned int sum;\npublic:\n\tvoid getRange();\n\tbool determine(int num);\n\tvoid print();\n};\nvoid palindrome::getRange(){\n\tint i;\n\tcin >> l;\n\tcin >> r;\n\tfor(i=l;i<=r;i++){\n\t\tif(determine(i)){\n\t\t\tsum=sum+i;\n\t\t}\n\t}\n} \nbool palindrome::determine(int num){\n\tint temp=0,tmp;\n\ttmp=num;\n\twhile(tmp){\n\t\tint digit=tmp%10;\n\t\ttmp=tmp/10;\n\t\ttemp=temp*10+digit;\n\t}\n\tif(temp==num)\n\t\treturn true;\n\treturn false;\n}\nvoid palindrome::print(){\n\tcout << sum << endl;\n\tsum=0;\n}\n/*int determine(int num){\n\tint temp=0,tmp;\n\ttmp=num;\n\twhile(tmp){\n\t\tint digit=tmp%10;\n\t\ttmp=tmp/10;\n\t\ttemp=temp*10+digit;\n\t}\n\tif(temp==num)\n\t\treturn 1;\n\treturn 0;\n}*/\nint main(){\n\tint n,i,l,r,j;\n\tcin >> n;\n\tfor(i=0;i<n;i++){\n\t\tpalindrome obj;\n\t\tobj.getRange();\n\t\tobj.print();\n\t\t/*cin >> l;\n\t\tcin >> r;\n\t\tlong long unsigned int sum=0;\n\t\tfor(j=l;j<=r;j++){\n\t\t\tif(determine(j))\n\t\t\t\tsum=sum+j;\n\t\t}\n\t\tcout << sum << endl;*/\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.40214067697525024,
"alphanum_fraction": 0.533639132976532,
"avg_line_length": 17.13888931274414,
"blob_id": "1226fc856eaa99fd6e027b436b448d5213ac4a41",
"content_id": "e51a48d0d002b0a2abbb2f75246b02636e73afa8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 654,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 36,
"path": "/coding/project euler/problem67.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<stdlib.h>\nint main(){\n\tint i,j;\n\tint n,value;\n\tint *arr[100];\n\tfor(i=0;i<100;i++){\n\t\tarr[i]=(int *)malloc(sizeof(int)*(i+1));\n\t}\n\tFILE *fp=fopen(\"triangle.txt\",\"r\");\n\tfor (i = 0; i < 100; ++i){\n\t\tfor(j=0;j<=i;j++){\n\t\t\tfscanf(fp,\"%d\", &value);\n\t\t\tarr[i][j]=value;\n\t\t}\n\t}\n\tfor(i=99;i>0;i--){\n\t\tfor(j=0;j<i;j++){\n\t\t\tint sum1=arr[i][j]+arr[i-1][j];\n\t\t\tint sum2=arr[i][j+1]+arr[i-1][j];\n\t\t\tint max=sum1>sum2?sum1:sum2;\n\t\t\tarr[i-1][j]=max;\n\t\t}\n\t}\n\tprintf(\"%d\\n\",arr[0][0]);\n\n\treturn 0;\n<<<<<<< HEAD\n<<<<<<< HEAD\n}\r\n=======\n}\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n}\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n"
},
{
"alpha_fraction": 0.2870637774467468,
"alphanum_fraction": 0.32117554545402527,
"avg_line_length": 22.658227920532227,
"blob_id": "030489471d047f9168dd008e6600cc2d515dadff",
"content_id": "87cda3038305ae5ef55d2c743e8e5eab4c937284",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3811,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 158,
"path": "/coding/geeksforgeeks/Amazon/problem7.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint findK(vector<std::vector<int> > A, int n, int m, int k)\r\n{\r\n//Your code here\r\n int l=0,top=0,right=m-1,down=n-1,left=0,dir=1,flag=0;\r\n while(flag==0){\r\n if(dir==1){\r\n for(int i=left;i<=right;i++){\r\n l++;\r\n if(l==k){\r\n cout << A[top][i] << endl;\r\n flag=1;\r\n break;\r\n }\r\n }\r\n dir=2;\r\n top++;\r\n }\r\n if(dir==2){\r\n for(int i=top;i<=down;i++){\r\n l++;\r\n if(l==k){\r\n cout << A[i][right] << endl;\r\n flag=1;\r\n break;\r\n }\r\n }\r\n dir=3;\r\n right--;\r\n }\r\n if(dir==3){\r\n for(int i=right;i>=left;i--){\r\n l++;\r\n if(l==k){\r\n cout << A[down][i] << endl;\r\n flag=1;\r\n break;\r\n }\r\n }\r\n dir=4;\r\n down--;\r\n }\r\n if(dir==4){\r\n for(int i=down;i>=top;i--){\r\n l++;\r\n if(l==k){\r\n cout << A[i][left] << endl;\r\n flag=1;\r\n break;\r\n }\r\n }\r\n left++;\r\n dir=1;\r\n }\r\n }\r\n return 0;\r\n}\r\nint main(){\r\n int col,row;\r\n cin >> row >> col;\r\n std::vector<std::vector<int> > A(row,std::vector<int>(col));\r\n for (int i = 0; i < row; ++i)\r\n {\r\n /* code */\r\n for(int j=0;j<col;j++)\r\n cin >> A[i][j];\r\n }\r\n //printMat(A,row,col);\r\n int k;\r\n cin >> k;\r\n int sol=findK(A,row,col,k);\r\n //cout << sol << endl;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint findK(vector<std::vector<int> > A, int n, int m, int k)\n{\n//Your code here\n int l=0,top=0,right=m-1,down=n-1,left=0,dir=1,flag=0;\n while(flag==0){\n if(dir==1){\n for(int i=left;i<=right;i++){\n l++;\n if(l==k){\n cout << A[top][i] << endl;\n flag=1;\n break;\n }\n }\n dir=2;\n top++;\n }\n if(dir==2){\n for(int i=top;i<=down;i++){\n l++;\n if(l==k){\n cout << A[i][right] << endl;\n flag=1;\n break;\n }\n }\n dir=3;\n right--;\n }\n if(dir==3){\n for(int i=right;i>=left;i--){\n l++;\n if(l==k){\n cout << A[down][i] << endl;\n flag=1;\n 
break;\n }\n }\n dir=4;\n down--;\n }\n if(dir==4){\n for(int i=down;i>=top;i--){\n l++;\n if(l==k){\n cout << A[i][left] << endl;\n flag=1;\n break;\n }\n }\n left++;\n dir=1;\n }\n }\n return 0;\n}\nint main(){\n int col,row;\n cin >> row >> col;\n std::vector<std::vector<int> > A(row,std::vector<int>(col));\n for (int i = 0; i < row; ++i)\n {\n /* code */\n for(int j=0;j<col;j++)\n cin >> A[i][j];\n }\n //printMat(A,row,col);\n int k;\n cin >> k;\n int sol=findK(A,row,col,k);\n //cout << sol << endl;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5598006844520569,
"alphanum_fraction": 0.5730897188186646,
"avg_line_length": 15.297297477722168,
"blob_id": "04e6cf4d9ddcf9228e148664fd53f7533580e7f5",
"content_id": "c8e068fbad26c2a61692044164e27868f103c502",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 602,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 37,
"path": "/coding/interviewbit/array/noble_integer.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<bits/stdc++.h>\nusing namespace std;\nclass noble_integer\n{\n\tstd::vector<int> A;\npublic:\n\tnoble_integer(int n);\n\tint solution();\n};\nnoble_integer::noble_integer(int n){\n\tA.resize(n);\n\tfor (int i = 0; i < n; ++i){\n\t\tcin >> A[i];\n\t}\n}\nint noble_integer::solution(){\n\tsort(A.begin(),A.end());\n\tfor(int i=0;i<A.size();i++){\n\t\tint diff=A.size()-1-i;\n\t\tint j=i+1;\n\t\twhile((j<A.size())&&A[j]==A[i]){\n\t\t\tj++;\n\t\t}\n\t\tdiff=diff-(j-i-1);\n\t\tif(A[i]==diff)\n\t\t\treturn 1;\n\t}\n\treturn -1;\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tnoble_integer noble(n);\n\tint result=noble.solution();\n\tcout << result <<endl;\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4832134246826172,
"alphanum_fraction": 0.5455635786056519,
"avg_line_length": 15.9375,
"blob_id": "2c8afc854432fcda8a93f70aaf87422d98398dbc",
"content_id": "4b473345bd2e4df2073f5b036ae88e8b0b816ddb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1668,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 96,
"path": "/coding/interviewbit/array/nextPermutation.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nbool compare(int a,int b){\r\n\treturn a>b;\r\n}\r\nvoid print(std::vector<int> A){\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\tcout << A[i] << \" \";\r\n\t}\r\n\tcout << endl;\r\n}\r\nstd::vector<int> nextPermutation(std::vector<int> A){\r\n\tif(A.size()==0)\r\n\t\treturn A;\r\n\tint i=A.size()-2;\r\n\twhile(i>=0&&A[i]>A[i+1])\r\n\t\ti--;\r\n\tif(i<0){\r\n\t\tsort(A.begin(),A.end());\r\n\t\treturn A;\r\n\t}\r\n\tint j=A.size()-1;\r\n\twhile(j>i){\r\n\t\tif(A[j]>A[i])\r\n\t\t\tbreak;\r\n\t\tj--;\r\n\t}\r\n\tswap(A[i],A[j]);\r\n\tsort(A.begin()+i+1,A.end());\r\n\treturn A;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tvector<int> A(n);\r\n\tfor(int i=0;i<n;i++)\r\n\t\tcin >> A[i];\r\n\tprint(A);\r\n\tstd::vector<int> result=nextPermutation(A);\r\n\tprint(result);\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nbool compare(int a,int b){\n\treturn a>b;\n}\nvoid print(std::vector<int> A){\n\tfor (int i = 0; i < A.size(); ++i)\n\t{\n\t\tcout << A[i] << \" \";\n\t}\n\tcout << endl;\n}\nstd::vector<int> nextPermutation(std::vector<int> A){\n\tif(A.size()==0)\n\t\treturn A;\n\tint i=A.size()-2;\n\twhile(i>=0&&A[i]>A[i+1])\n\t\ti--;\n\tif(i<0){\n\t\tsort(A.begin(),A.end());\n\t\treturn A;\n\t}\n\tint j=A.size()-1;\n\twhile(j>i){\n\t\tif(A[j]>A[i])\n\t\t\tbreak;\n\t\tj--;\n\t}\n\tswap(A[i],A[j]);\n\tsort(A.begin()+i+1,A.end());\n\treturn A;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tprint(A);\n\tstd::vector<int> result=nextPermutation(A);\n\tprint(result);\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.44397464394569397,
"alphanum_fraction": 0.5412262082099915,
"avg_line_length": 17.756755828857422,
"blob_id": "2b1615ee7cbb0cb10da1c47fb29a21f7658ee74f",
"content_id": "d57f81eb2fb913f0c46b0a2f10632088c0a25943",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1419,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 74,
"path": "/coding/interviewbit/DP/countBst.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\n#define mod 1000000007\r\nusing namespace std;\r\nlong int countBST(int n,int h){\r\n\t//int table[]\r\n\tstd::vector<std::vector<int> > table(h+1,std::vector<int>(n+1));\r\n\ttable[0][1]=1;\r\n\tfor (int i = 2; i <= n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\ttable[0][i]=0;\r\n\t}\r\n\tfor (int i = 0; i <= h; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\ttable[i][0]=1;\r\n\t}\r\n\tfor(int i=1;i<=h;i++){\r\n\t\tfor(int j=1;j<=n;j++)\r\n\t\t\ttable[i][j]=(table[i-1][j-1]*table[i-1][n-j])%mod;\r\n\t}\r\n\treturn table[h][n];\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tint h;\r\n\tcin >> h;\r\n\tlong int sol=countBST(n,h);\r\n\tcout << sol <<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\n#define mod 1000000007\nusing namespace std;\nlong int countBST(int n,int h){\n\t//int table[]\n\tstd::vector<std::vector<int> > table(h+1,std::vector<int>(n+1));\n\ttable[0][1]=1;\n\tfor (int i = 2; i <= n; ++i)\n\t{\n\t\t/* code */\n\t\ttable[0][i]=0;\n\t}\n\tfor (int i = 0; i <= h; ++i)\n\t{\n\t\t/* code */\n\t\ttable[i][0]=1;\n\t}\n\tfor(int i=1;i<=h;i++){\n\t\tfor(int j=1;j<=n;j++)\n\t\t\ttable[i][j]=(table[i-1][j-1]*table[i-1][n-j])%mod;\n\t}\n\treturn table[h][n];\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tint h;\n\tcin >> h;\n\tlong int sol=countBST(n,h);\n\tcout << sol <<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.654321014881134,
"alphanum_fraction": 0.6931216716766357,
"avg_line_length": 62,
"blob_id": "248c3b4c2faa6a55a33c5b37eac47b6e7b51cbbe",
"content_id": "7aa63b79c75fc6d8ad44c448c18ae1c9d0b37129",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 567,
"license_type": "no_license",
"max_line_length": 302,
"num_lines": 9,
"path": "/script/proxy-script.py",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "import os\nport=8080\nfor i in range(255):\n\tfor j in range(255):\n\t\tip=\"http_proxy=http://172.16.\"+str(i)+'.'+str(j)+\"/ curl -s --connect-timeout \"+str(.05)+\" www.google.com/humans.txt\"\n\t\tread=os.popen(ip).read();\n\t\tif(read==\"Google is built by a large team of engineers, designers, researchers, robots, and others in many different sites across the globe. It is updated continuously, and built with more tools and technologies than we can shake a stick at. If you'd like to help us out, see google.com/careers.\\n\"):\n\t\t\t#os.system()\n\t\t\tprint \"172.16.\"+str(i)+'.'+str(j)\n"
},
{
"alpha_fraction": 0.5136842131614685,
"alphanum_fraction": 0.5347368717193604,
"avg_line_length": 14.866666793823242,
"blob_id": "cbca47a171e89f4c188b8af744ebaec5c59724d1",
"content_id": "ddbda37e51d54bab2ed692bb5ec521a9460e3cf7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 475,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 30,
"path": "/coding/codechef/CANDY123.java",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "import java.util.*;\nimport java.lang.*;\nimport java.io.*;\nclass CANDY123{\n\tpublic static void main(String[] args) {\n\t\tint t,a,b;\n\t\tScanner reader=new Scanner(System.in);\n\t\tt=reader.nextInt();\n\t\twhile(t>0){\n\t\t\tt--;\n\t\t\ta=reader.nextInt();\n\t\t\tb=reader.nextInt();\n\t\t\tint f=1,s=2;\n\t\t\twhile(true){\n\t\t\t\ta=a-f;\n\t\t\t\tif(a<0){\n\t\t\t\t\tSystem.out.println(\"Bob\");\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tb=b-s;\n\t\t\t\tif(b<0){\n\t\t\t\t\tSystem.out.println(\"Limak\");\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tf+=2;\n\t\t\t\ts+=2;\n\t\t\t}\n\t\t}\n\t}\n}"
},
{
"alpha_fraction": 0.4000000059604645,
"alphanum_fraction": 0.5977011322975159,
"avg_line_length": 14.777777671813965,
"blob_id": "98187173b4ff9868d04c7d386ef95be1a3f8d684",
"content_id": "2c3e09ee500ec49dfb8b750034142a88d6302d0d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 435,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 27,
"path": "/coding/geeksforgeeks/tree/main.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include <iostream>\r\n\r\nusing namespace std;\r\n\r\nint main()\r\n{\r\n cout << \"Hello world!\" << endl;\r\n return 0;\r\n}\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include <iostream>\n\nusing namespace std;\n\nint main()\n{\n cout << \"Hello world!\" << endl;\n return 0;\n}\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n"
},
{
"alpha_fraction": 0.5090909004211426,
"alphanum_fraction": 0.5272727012634277,
"avg_line_length": 15.083333015441895,
"blob_id": "d085fc0f29144e1f2e70240d386c414b668e6ce9",
"content_id": "305679f691070463268a124a4a04109e117c4bd6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 385,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 24,
"path": "/coding/codechef/LONGSEQ.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n// #include <vector>\n#include <cstdlib>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t,z,o;\n\tstring number;\n\tcin >> t;\n\twhile(t--){\n\t\tcin >> number;\n\t\tz=o=0;\n\t\tfor(int i=0;i<number.size();i++){\n\t\t\tif(number[i]=='0')\n\t\t\t\to++;\n\t\t\tif(number[i]=='1')\n\t\t\t\tz++;\n\t\t}\n\t\tif(z==1||o==1)\n\t\t\tcout << \"Yes\" << endl;\n\t\telse\n\t\t\tcout << \"No\\n\";\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.7098039388656616,
"alphanum_fraction": 0.7254902124404907,
"avg_line_length": 18.69230842590332,
"blob_id": "5727a024fce59f03685d703bcd0722f1784808b0",
"content_id": "079414961d65bee0ac393a81448d3157b77a23f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 255,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 13,
"path": "/nathadan.py",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "import xlsxwriter\n\nworkbook = xlsxwriter.Workbook('sample.xlsx')\nworksheet = workbook.add_worksheet()\n\nworksheet.write('A1', 'S.No')\nworksheet.write('B1', 'Name')\nworksheet.write('C1', 'Roll Number')\nworksheet.write('D1', 'Department')\n\n\n\nworkbook.close()"
},
{
"alpha_fraction": 0.5876436829566956,
"alphanum_fraction": 0.5991379022598267,
"avg_line_length": 18.91428565979004,
"blob_id": "1a9b62e29b44e4a1a0ab27fa51271309a8d9af77",
"content_id": "c5ee029e538812712d61ac3ef449db17c5095473",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 696,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 35,
"path": "/coding/interviewbit/array/k_th_pascal_row.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<bits/stdc++.h>\nusing namespace std;\nclass k_th_pascal_row\n{\npublic:\n\t//k_th_pascal_row(int A);\n\tvector<int> generate(int A);\n};\nstd::vector<int> k_th_pascal_row::generate(int A){\n\t//A=A+1;\n\tstd::vector<int> result;\n\tfor (int i = 0; i <= A; ++i){\n\t\tstd::vector<int> inner;\n\t\tif(result.size())\n\t\t\tinner.push_back(result[0]);\n\t\tfor(int j=1;j<result.size();j++){\n\t\t\tinner.push_back(result[j]+result[j-1]);\n\t\t}\n\t\tinner.push_back(1);\n\t\tresult=inner;\n\t}\n\treturn result;\n}\nint main(){\n\tk_th_pascal_row pascal_row;\n\tint A;\n\tcin >> A;\n\tstd::vector<int> result=pascal_row.generate(A);\n\tfor (int i = 0; i < result.size(); ++i){\n\t\t/* code */\n\t\tcout << result[i] << \" \";\n\t}\n\tcout << endl;\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4748663008213043,
"alphanum_fraction": 0.5796791315078735,
"avg_line_length": 17.31999969482422,
"blob_id": "065d66211a6a77be1ff4f58b3db46910f1a4e8b6",
"content_id": "571dfb32be7f467bf288a304cbb8b24e91a1be67",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 935,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 50,
"path": "/coding/interviewbit/DP/maxprofit_2.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint maxProfit(std::vector<int> A){\r\n\tint ans=0;\r\n\tfor(int i=0;i<A.size()-1;i++){\r\n\t\tif(A[i]<A[i+1])\r\n\t\t\tans=ans+A[i+1]-A[i];\r\n\t}\r\n\treturn ans;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tvector<int> A(n);\r\n\tfor(int i=0;i<n;i++)\r\n\t\tcin >> A[i];\r\n\tint profit=maxProfit(A);\r\n\tcout << profit <<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint maxProfit(std::vector<int> A){\n\tint ans=0;\n\tfor(int i=0;i<A.size()-1;i++){\n\t\tif(A[i]<A[i+1])\n\t\t\tans=ans+A[i+1]-A[i];\n\t}\n\treturn ans;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tint profit=maxProfit(A);\n\tcout << profit <<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.4683840870857239,
"alphanum_fraction": 0.5402029752731323,
"avg_line_length": 15,
"blob_id": "14b20cb5de90bc259817ea15806e93547b1a9222",
"content_id": "152ffce412c2ba488c49d1ab07fd94a2f0944b30",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1281,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 78,
"path": "/coding/interviewbit/DP/longestParenthesis.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint longest(string str){\r\n\tstack<int> s;\r\n\tint last=-1;\r\n\tint maxLen=0;\r\n\tfor (int i = 0; i < str.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tif(str[i]=='(')\r\n\t\t\ts.push(i);\r\n\t\telse{\r\n\t\t\tif(s.empty())\r\n\t\t\t\tlast=i;\r\n\t\t\telse{\r\n\t\t\t\ts.pop();\r\n\t\t\t\tif(s.empty())\r\n\t\t\t\t\tmaxLen=max(maxLen,i-last);\r\n\t\t\t\telse\r\n\t\t\t\t\tmaxLen=max(maxLen,i-s.top());\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\treturn maxLen;\r\n}\r\nint main(){\r\n\tint n;\r\n\t//cin >> n;\t\r\n\tstring str;\r\n\tcin >> str;\r\n\tint sol=longest(str);\r\n\tcout << sol << endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint longest(string str){\n\tstack<int> s;\n\tint last=-1;\n\tint maxLen=0;\n\tfor (int i = 0; i < str.size(); ++i)\n\t{\n\t\t/* code */\n\t\tif(str[i]=='(')\n\t\t\ts.push(i);\n\t\telse{\n\t\t\tif(s.empty())\n\t\t\t\tlast=i;\n\t\t\telse{\n\t\t\t\ts.pop();\n\t\t\t\tif(s.empty())\n\t\t\t\t\tmaxLen=max(maxLen,i-last);\n\t\t\t\telse\n\t\t\t\t\tmaxLen=max(maxLen,i-s.top());\n\t\t\t}\n\t\t}\n\t}\n\treturn maxLen;\n}\nint main(){\n\tint n;\n\t//cin >> n;\t\n\tstring str;\n\tcin >> str;\n\tint sol=longest(str);\n\tcout << sol << endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.7046263217926025,
"alphanum_fraction": 0.725978672504425,
"avg_line_length": 27.100000381469727,
"blob_id": "d52a29237157df13c0af6f2029cc14b7f2f11415",
"content_id": "2172f6f4911c83ad85adb5bd097ad7c64b0e7ae6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 562,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 20,
"path": "/w.js",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "window.onload = function() {\n\tvar pos;\n\tvar age={\n\t\tmaximumAge:5*60*1000,\n\t\ttimeout:5*1000,\n\t\tenableHighAccuracy:true\n\t}\n\tvar getLocation = function(position) {\n\t\tpos=position;\n\t\tvar latitude=pos.coords.latitude;\n\t\tvar longitude=pos.coords.longitude;\n\t\t//alert(\"latitude:\"latitude);\n\t\tdocument.getElementById('loc').innerHTML = \"latitude:\"+ latitude+\"\\nlongitude:\"+longitude;\n console.log(longitude);\n\t};\n\tvar err=function(error){\n\t\t//document.getElementById('loc').innerHTML=error.code;\n\t};\n\tnavigator.geolocation.getCurrentPosition(getLocation,err,age);\n};\n"
},
{
"alpha_fraction": 0.4905149042606354,
"alphanum_fraction": 0.5094850659370422,
"avg_line_length": 15.086956977844238,
"blob_id": "6bf27e309a1baefb780a1896725ab299b55888df",
"content_id": "2d459e642c0386e7a4d1b43cc718147768751d92",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 369,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 23,
"path": "/coding/codechef/amb-permute.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<stdlib.h>\nint main(){\n\tint n,i;\n\tscanf(\"%d\", &n);\n\twhile(n){\n\t\tint *arr=(int *)malloc(n*sizeof(int));\n\t\tint flag=0;\n\t\tfor(i=0;i<n;i++)\n\t\t\tscanf(\"%d\", &arr[i]);\n\t\tfor(i=0;i<n;i++){\n\t\t\tif(arr[arr[i]-1]!=i+1){\n\t\t\t\tprintf(\"not ambiguous\\n\");\n\t\t\t\tflag=1;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t\tif(!flag)\n\t\t\tprintf(\"ambiguous\\n\");\n\t\tscanf(\"%d\", &n);\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4318840503692627,
"alphanum_fraction": 0.48115941882133484,
"avg_line_length": 14.727272987365723,
"blob_id": "8b507b66633192879c0fe8138aa8b6c630700126",
"content_id": "f3b85d4d6c11e659dca5a46d6ae0676d4c0c243c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 345,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 22,
"path": "/coding/codechef/CHEFSTLT.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<stdlib.h>\n#include<string.h>\nint main(){\n\tint t;\n\tscanf(\"%d\", &t);\n\twhile(t--){\n\t\tchar x[101],y[101];\n\t\tscanf(\"%s\",x);\n\t\tscanf(\"%s\",y);\n\t\tint c1=0,c2=0;\n\t\tint len=strlen(x),i;\n\t\tfor(i=0;i<len;i++){\n\t\t\tif(x[i]=='?'||y[i]=='?')\n\t\t\t\tc1++;\n\t\t\telse if(x[i]!=y[i])\n\t\t\t\tc2++;\n\t\t}\n\t\tprintf(\"%d %d\\n\",c2,c2+c1);\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.45255473256111145,
"alphanum_fraction": 0.47445255517959595,
"avg_line_length": 12.733333587646484,
"blob_id": "bd8e04614acaaca797bc0e8b71e573a0a3ce35fe",
"content_id": "49671f868f7d61cb99e61a1998e5e3831588d2bf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 411,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 30,
"path": "/coding/codechef/TWOSTR.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<stdlib.h>\n#include<string.h>\nint main()\n{\n\t/* code */\n\tint t;\n\tscanf(\"%d\", &t);\n\twhile(t--){\n\t\tchar x[11],y[11];\n\t\tint i;\n\t\tscanf(\"%s\",x);\n\t\tscanf(\"%s\",y);\n\t\tint len=strlen(x);\n\t\tint flag=0;\n\t\tfor(i=0;i<len;i++){\n\t\t\tif(x[i]=='?'||y[i]=='?')\n\t\t\t\tcontinue;\n\t\t\telse if(x[i]!=y[i])\n\t\t\t{\n\t\t\t\tprintf(\"No\\n\");\n\t\t\t\tflag=1;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t\tif(flag==0)\n\t\t\tprintf(\"Yes\\n\");\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.375314861536026,
"alphanum_fraction": 0.6070529222488403,
"avg_line_length": 16.81818199157715,
"blob_id": "7216ce8aa7030bd369ab343db620399324a1625a",
"content_id": "441bdf95b467ef13cd227e4c205534e3d0644fe8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 397,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 22,
"path": "/coding/interviewbit/DP/uniqueBST.py",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\nA=input(\"Number\");\r\nup=1\r\ndown=1;\r\nfor i in range(2,A+1):\r\n\tup=up*(i+A);\r\n\tdown=down*i;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\nA=input(\"Number\");\nup=1\ndown=1;\nfor i in range(2,A+1):\n\tup=up*(i+A);\n\tdown=down*i;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\nprint(up/down);"
},
{
"alpha_fraction": 0.5526315569877625,
"alphanum_fraction": 0.5679824352264404,
"avg_line_length": 14.233333587646484,
"blob_id": "9937a918105f61ada2f70b00db82374937cbf283",
"content_id": "252da1c322e8f46152ed74fa36073cb5ded40b89",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 456,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 30,
"path": "/coding/codechef/TICKETS5.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\nusing namespace std;\nbool check(string s){\n\tchar f=s[0];\n\tchar o=s[1];\n\tif(f==o)\n\t\treturn false;\n\tint len=s.size();\n\tif(len%2)\n\t\treturn false;\n\tfor(int i=0;i+2<len;i++){\n\t\tif(s[i]!=s[i+2])\n\t\t\treturn false;\n\t}\n\treturn true;\n}\nint main(int argc, char const *argv[]){\n\tint t;\n\t// long long unsigned int sum,t,m,b,n;\n\tcin >> t;\n\tstring s;\n\twhile(t--){\n\t\tcin >> s;\n\t\tif(check(s))\n\t\t\tcout << \"YES\\n\";\n\t\telse\n\t\t\tcout << \"NO\\n\";\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5307376980781555,
"alphanum_fraction": 0.5471311211585999,
"avg_line_length": 14.28125,
"blob_id": "77ca611705df9285ebd0d6b9ab301497307130ff",
"content_id": "55487a9415a5bd011520c524c053e0add87a27fa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 488,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 32,
"path": "/coding/n_c_r.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nlong long unsigned int n_c_r(int n,int r){\n\tint i;\n\tlong long unsigned int full=1,half=1,other=1;\n\tfor(i=2;i<=n;i++){\n\t\tfull=full*i;\n\t\tif(i<=r){\n\t\t\thalf=half*i;\n\t\t}\n\n\t\tif(i<=(n-r)){\n\t\t\tother=other*i;\n\t\t}\n\t\tif(full%half==0){\n\t\t\tfull=full/half;\n\t\t\thalf=1;\n\t\t}\n\t\tif (full%other==0)\n\t\t{\n\t\t\t/* code */\n\t\t\tfull=full/other;\n\t\t\tother=1;\n\t\t}\n\t}\n\treturn full;\n}\nint main(){\n\tint n,r;\n\tscanf(\"%d%d\", &n, &r);\n\tlong long unsigned int ans=n_c_r(n,r);\n\tprintf(\"ans is %llu\\n\",ans );\n}"
},
{
"alpha_fraction": 0.49771273136138916,
"alphanum_fraction": 0.508691668510437,
"avg_line_length": 22.7608699798584,
"blob_id": "ca30648073bce02d4cbec1962a05703cb4b8737b",
"content_id": "cddc0826dae7bf675039a22fc95319b4214a928f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1093,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 46,
"path": "/coding/interviewbit/linkedList/reOrderList.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "/**\n * Definition for singly-linked list.\n * struct ListNode {\n * int val;\n * ListNode *next;\n * ListNode(int x) : val(x), next(NULL) {}\n * };\n */\nListNode* Solution::reorderList(ListNode* A) {\n ListNode *next = A, *reverseA = nullptr;\n int count = 0;\n while(next){\n ListNode *temp = new ListNode(next->val);\n next = next ->next;\n temp -> next = reverseA;\n reverseA = temp;\n count++;\n }\n ListNode *ans = nullptr;\n int cc = count/2;\n while(cc--){\n ListNode *node1 = new ListNode(A->val);\n node1->next = ans;\n ans = node1;\n ListNode *node2 = new ListNode(reverseA->val);\n node2->next = ans;\n ans = node2;\n A = A->next;\n reverseA = reverseA->next;\n }\n \n if(count%2){\n ListNode *node1 = new ListNode(A->val);\n node1->next = ans;\n ans = node1;\n }\n next = ans;\n ListNode *prev = nullptr;\n while(next){\n ListNode *temp = next->next;\n next->next = prev;\n prev = next;\n next = temp;\n }\n return prev;\n}\n"
},
{
"alpha_fraction": 0.44170403480529785,
"alphanum_fraction": 0.46188339591026306,
"avg_line_length": 15.55555534362793,
"blob_id": "9f59013e6cc56b412af2898e88df1717d98351ed",
"content_id": "c45c25503c382d0001a0e1b4eb251ca4df37d89a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 446,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 27,
"path": "/coding/codechef/TALAZY.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\t// int t,m,b,n;\n\tlong long unsigned int sum,t,m,b,n;\n\tcin >> t;\n\twhile(t--){\n\t\tsum=0;\n\t\tcin >> n >> b >> m;\n\t\twhile(n){\n\t\t\tint done;\n\t\t\tif(n%2==0){\n\t\t\t\tdone=n/2;\n\t\t\t}\n\t\t\telse\n\t\t\t\tdone=(n+1)/2;\n\t\t\tn=n/2;\n\t\t\tsum=sum+done*m;\n\t\t\tm=2*m;\n\t\t\tif(n)\n\t\t\tsum+=b;\n\t\t\t// cout << done << \" \" << m << \" \" << n << \" \" << \" \" << sum<<endl;\n\t\t}\n\t\tcout << sum <<endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4063745141029358,
"alphanum_fraction": 0.5115537643432617,
"avg_line_length": 18.838708877563477,
"blob_id": "c6875b6316c786464497f682dcd764e65a233045",
"content_id": "c3379b5c03c022e4deaaf3a43ccd7111c57ce95d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1255,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 62,
"path": "/coding/interviewbit/DP/chordsInCircle.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\n#define mod 1000000007\r\nint number(int n){\r\n\tvector<int> A;\r\n A.push_back(1);\r\n int i;\r\n int val1,val2=A[0],last=A[A.size()-1];\r\n for(i=0;i<2*n;i++){\r\n for(int j=1;j<A.size();j++){\r\n val1=A[j];\r\n A[j]=(A[j]+val2)%mod;\r\n val2=val1;\r\n }\r\n A.push_back(last);\r\n }\r\n int num=(A[n]/(n+1));\r\n return num;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tint sol=number(n);\r\n\tcout << sol << endl;\t\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\n#define mod 1000000007\nint number(int n){\n\tvector<int> A;\n A.push_back(1);\n int i;\n int val1,val2=A[0],last=A[A.size()-1];\n for(i=0;i<2*n;i++){\n for(int j=1;j<A.size();j++){\n val1=A[j];\n A[j]=(A[j]+val2)%mod;\n val2=val1;\n }\n A.push_back(last);\n }\n int num=(A[n]/(n+1));\n return num;\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tint sol=number(n);\n\tcout << sol << endl;\t\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.6431636810302734,
"alphanum_fraction": 0.6480686664581299,
"avg_line_length": 21.054054260253906,
"blob_id": "b79e4e68fa5aff33a33ae405a4c34ea6c889ec17",
"content_id": "6bab4fa535845bd8df844619ad6f5b1ec985852b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1631,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 74,
"path": "/hello.py",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "from whoosh.index import create_in\nfrom whoosh.fields import *\nimport web\n\n\nimport whoosh\nfrom whoosh.index import create_in\nfrom whoosh.fields import *\nfrom bs4 import BeautifulSoup\nimport string\nimport sys,urllib2,urllib\nfrom whoosh.qparser import QueryParser\n\n\n\nurls = [\n '/hello','Index'\n]\nrender = web.template.render('templates/')\n\n\ndef search(data):\n\tarray =[]\n\t'''array = []\n\twith open(\"links.txt\", \"r\") as ins:\n\t\tfor line in ins:\n\t\t\tif data in line:\n\t\t\t\tarray.append(str(line))\n\t'''\n\t#return \"\\n\".join(array)\n\tfp= open('solution.txt','w')\n\tschema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT)\n\tix = create_in(\"test\", schema)\n\twriter = ix.writer()\n\twith open(\"links.txt\", \"r\") as ins:\n\t\tfor line in ins:\n\t\t\turl = line\n\t\t\tif data in line:\n\t\t\t\twriter.add_document(title=unicode(url,\"utf-8\"),path=unicode(\"/\"+url,\"utf-8\"),content=unicode(line))\n\t\t\t\tarray.append(str(line))\n\t\t\t\tfp.write(line + \"\\n\")\n\n\twriter.commit()\n\tfp.close()\n\treturn \"\\n\".join(array)\n\n'''\n\twith open('links.txt') as fp:\n\t\tfor line in fp:\n\t\t\turl=line\n\t\t\tif data not in url:\n\t\t\t\tprint line\n\t\t\t\ttry:\n\t\t\t\t\tf=urllib.urlopen(url)\n\t\t\t\t\tcontent1=f.read()\n\t\t\t\t\tsoup = BeautifulSoup(content1)\n\t\t\t\t\ttitle = soup.get_text()\n\t\t\t\t\twriter.add_document(title=unicode(url,\"utf-8\"),path=unicode(\"/\"+url,\"utf-8\"),content=unicode(title))\n\t\t\t\texcept Exception as e:\n\t\t\t\tprint 'caugth exception'\n''' \n \n\nclass Index(object):\n def GET(self):\n return render.hello_form()\n\n def POST(self):\n form = web.input(greet=\"Hello\")\n return search(form.greet)\n\nif __name__==\"__main__\":\n\tapp = web.application(urls, globals())\n\tapp.run()"
},
{
"alpha_fraction": 0.48739495873451233,
"alphanum_fraction": 0.506302535533905,
"avg_line_length": 16.035715103149414,
"blob_id": "d9d81a786fbd3af9ac8a18c7f04745fa7ff275ad",
"content_id": "a0edd66b10ee9fad1315725523ac61608fef9dcf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 476,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 28,
"path": "/coding/codechef/STRPALIN.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <vector>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t;\n\tstring a , b;\n\tcin >> t;\n\twhile(t--){\n\t\tcin >> a >> b;\n\t\tbool flag=false;\n\t\tbool ch[26]={false};\n\t\tfor (int i = 0; i < a.size(); ++i){\n\t\t\tint val=((int)a[i]-97);\n\t\t\tch[val]=true;\n\t\t}\n\t\tfor (int i = 0; i < b.size(); ++i){\n\t\t\t/* code */\n\t\t\tint val=((int)b[i]-97);\n\t\t\tif(ch[val])\n\t\t\t\tflag=true;\n\t\t}\n\t\tif(flag)\n\t\t\tcout << \"Yes\\n\";\n\t\telse\n\t\t\tcout << \"No\\n\";\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.42080745100975037,
"alphanum_fraction": 0.46739131212234497,
"avg_line_length": 14.357142448425293,
"blob_id": "1334c41f119770755b58c52d6725b89f45c2d6ce",
"content_id": "67c614602a4b99fdf71fb064fdc8a270925947e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 644,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 42,
"path": "/coding/codechef/transform.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<stdlib.h>\n#include<string.h>\n\nint main(){\n\tint t;\n\tscanf(\"%d\", &t);\n\twhile(t--){\n\t\tchar str[400],stack[400];\n\t\tint size=0,i;\n\t\tscanf(\"%s\",str);\n\t\tfor(i=0;i<strlen(str);i++){\n\t\t\tint val=str[i];\n\t\t\tint flag=0;\n\t\t\tif(val>=65&&val<=90){\n\t\t\t\tprintf(\"%c\",str[i]);\n\t\t\t\tflag=1;\n\t\t\t}\n\t\t\tif(val>=97&&val<=122){\n\t\t\t\tprintf(\"%c\",str[i]);\n\t\t\t\tflag=1;\n\t\t\t}\n\t\t\tif(val>=48&&val<=57){\n\t\t\t\tprintf(\"%c\",str[i]);\n\t\t\t\tflag=1;\n\t\t\t}\n\t\t\tif(str[i]==')'){\n\t\t\t\t\tprintf(\"%c\",stack[size-1]);\n\t\t\t\t\tflag=1;\n\t\t\t\t\tsize--;\n\t\t\t}\n\t\t\tif(str[i]=='(')\n\t\t\t\tflag=1;\n\t\t\tif(flag==0){\n\t\t\t\tstack[size]=str[i];\n\t\t\t\tsize++;\n\t\t\t}\n\t\t}\n\t\tprintf(\"\\n\");\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.6184210777282715,
"alphanum_fraction": 0.640350878238678,
"avg_line_length": 18.04166603088379,
"blob_id": "5e23a53ff2207af860d952775583a18911698373",
"content_id": "105114f205b174429a33341a4ae8112179034c37",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 456,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 24,
"path": "/coding/codechef/expense.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<cstdlib>\n#include<iomanip>\nusing namespace std;\nint main()\n{\n\tint n,i;\n\tcin >> n;\n\tfor(i=0;i<n;i++){\n\t\tint quantity,price;\n\t\tcin >> quantity;\n\t\tcin >> price;\n\t\tlong long int total=quantity*price;\n\t\tdouble discout,sum;\n\t\tsum=(double)total;\n\t\tif(quantity>1000){\n\t\t\tdiscout=(double)((double)total/10.0);\n\t\t\tsum=(double)((double)total-discout);\n\t\t}\n\t\tcout << std::setprecision(6)<< std::fixed;\n\t\tcout << sum << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5321100950241089,
"alphanum_fraction": 0.5412843823432922,
"avg_line_length": 13.714285850524902,
"blob_id": "f5107c21400e5ca8f41c8fce4c84b081037ba918",
"content_id": "46c2804e3b2de66f736cfca1c3a5b884a0eba19c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 218,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 14,
"path": "/coding/interviewbit/array/maxDistance.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\n\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tstd::vector<int> A(n);\r\n\tfor (int i = 0; i < n; ++i){\r\n\t\tcin >> A[i];\r\n\t}\r\n\tint ret=maxDistance(A);\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.5794979333877563,
"alphanum_fraction": 0.6004183888435364,
"avg_line_length": 19.826086044311523,
"blob_id": "baa3107ea4d440540b93c3d8c912b9c9310e4568",
"content_id": "90fa348a0bff276d03827cbdc8a534da30dcb927",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 478,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 23,
"path": "/coding/geeksforgeeks/dynamic/dearrangement.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nlong long unsigned int cal(int n){\n\tlong long unsigned int count[n+1];\n\tint i;\n\tcount[0]=1;\n\tcount[1]=0;\n\tfor ( i = 2; i < n; ++i)\n\t{\n\t\tcount[i]=(i+1)*(count[i-1]+count[i-2]);\n\t\t/* code */\n\t}\n\treturn count[n];\n}\nint main(int argc, char const *argv[])\n{\n\tint n;\n\tprintf(\"enter number\\n\");\n\tscanf(\"%d\", &n);\n\tlong long unsigned int res=cal(n);\n\tprintf(\"result is %llu\\n\",res );\n\tprintf(\"time compelxity is (O(n))\\nAnd space compelxity is (O(n))\\n\");\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5061728358268738,
"alphanum_fraction": 0.5493826866149902,
"avg_line_length": 15.793103218078613,
"blob_id": "ad6b3213f8e1953ddb49858f4ad76d501656c452",
"content_id": "a72897db244cf930fe9e40c08fc26d150e9afacd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 486,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 29,
"path": "/coding/project euler/24.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nstd::vector<int> permute(std::vector<int> A){\n\tfor(int k=0;k<999999;k++){\n\tint i=A.size()-2;\n\twhile(i>=0&&A[i]>A[i+1])\n\t\ti--;\n\tint j=A.size()-1;\n\twhile(j>i){\n\t\tif(A[j]>A[i])\n\t\t\tbreak;\n\t\tj--;\n\t}\n\tswap(A[i],A[j]);\n\tsort(A.begin()+i+1,A.end());\n\t}\n\treturn A;\n}\nint main(){\t\n\tvector<int> A(10);\n\tfor(int i=0;i<10;i++)\n\t\tA[i]=i;\n\tstd::vector<int> v=permute(A);\n\tfor(int i=0;i<10;i++)\n\t\tcout << v[i];\n\tcout << endl;\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.45783132314682007,
"alphanum_fraction": 0.5879517793655396,
"avg_line_length": 17.086956024169922,
"blob_id": "ad3061e7f46d544c4209a4251f7ba95181eb7a75",
"content_id": "d0efc47f3e601f41f75e6d5d00f35b652dcc7840",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 415,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 23,
"path": "/coding/project euler/31.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "//1,2,5,10,20,50,100,200\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint main(){\t\n\tstd::vector<int> coin(8);\n\tcoin[0]=1;\n\tcoin[1]=2;\n\tcoin[2]=5;\n\tcoin[3]=10;\n\tcoin[4]=20;\n\tcoin[5]=50;\n\tcoin[6]=100;\n\tcoin[7]=200;\n\tstd::vector<int> table(201,0);\n\ttable[0]=1;\n\tfor(int i=0;i<8;i++){\n\t\tfor(int j=coin[i];j<=200;j++)\n\t\t\ttable[j]=table[j]+table[j-coin[i]];\n\t}\n\tcout << table[200] << endl;\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4276479482650757,
"alphanum_fraction": 0.4903033375740051,
"avg_line_length": 20.91111183166504,
"blob_id": "d78819f1f06b2196cd0f901418174bc1270ff188",
"content_id": "c604041ff58a843e6fd1508d315ae76f5aa2ac84",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2011,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 90,
"path": "/coding/interviewbit/array/flip.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nstd::vector<int> flip(string A){\r\n\tvector<int> temp;\r\n vector<int> result;\r\n for(int i=0;i<A.size();i++){\r\n if(A[i]=='0')\r\n temp.push_back(1);\r\n else temp.push_back(-1);\r\n }\r\n int sum_till=0,max_sum=0,start=-1,end=-1,l=0;\r\n for(int i=0;i<A.size();i++){\r\n if(sum_till+temp[i]<0){\r\n sum_till=0;\r\n l=i+1;\r\n //cout << l << endl;\r\n }\r\n else sum_till+=temp[i];\r\n if(sum_till>max_sum){\r\n max_sum=sum_till;\r\n start=l;\r\n end=i;\r\n }\r\n }\r\n if(start==-1||end==-1)\r\n return result;\r\n result.push_back(start+1);\r\n result.push_back(end+1);\r\n return result;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\treturn 0;\r\n\tstring A;\r\n\tcin >> A;\r\n\tstd::vector<int> v=flip(A);\r\n\tcout << v[0] << \" \" << v[1] << endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nstd::vector<int> flip(string A){\n\tvector<int> temp;\n vector<int> result;\n for(int i=0;i<A.size();i++){\n if(A[i]=='0')\n temp.push_back(1);\n else temp.push_back(-1);\n }\n int sum_till=0,max_sum=0,start=-1,end=-1,l=0;\n for(int i=0;i<A.size();i++){\n if(sum_till+temp[i]<0){\n sum_till=0;\n l=i+1;\n //cout << l << endl;\n }\n else sum_till+=temp[i];\n if(sum_till>max_sum){\n max_sum=sum_till;\n start=l;\n end=i;\n }\n }\n if(start==-1||end==-1)\n return result;\n result.push_back(start+1);\n result.push_back(end+1);\n return result;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\treturn 0;\n\tstring A;\n\tcin >> A;\n\tstd::vector<int> v=flip(A);\n\tcout << v[0] << \" \" << v[1] << endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5219047665596008,
"alphanum_fraction": 0.5333333611488342,
"avg_line_length": 17.13793182373047,
"blob_id": "172b6bea2a23a737eb1f8d901cd5a492900e7845",
"content_id": "f98e0a5e11a6b0275d2ecc04a435c679ff285cd2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 525,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 29,
"path": "/coding/codechef/SIMPSTAT.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <vector>\n#include <algorithm>\n#include <iomanip>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t,n,k;\n\tdouble ans;\n\tcin >> t;\n\tstd::vector<int> num;\n\twhile(t--){\n\t\tcin >> n >> k;\n\t\tnum.resize(n);\n\t\tfor (int i = 0; i < n; ++i){\n\t\t\t/* code */\n\t\t\tcin >> num[i];\n\t\t}\n\t\tsort(num.begin(),num.end());\n\t\tans=0;\n\t\tfor (int i = k; i <= n-1-k; ++i){\n\t\t\t/* code */\n\t\t\tans+=num[i];\n\t\t}\n\t\tans=(double)(ans/(n-2*k));\n\t\tcout << fixed;\n\t\tcout << setprecision(6) << ans << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5043478012084961,
"alphanum_fraction": 0.5043478012084961,
"avg_line_length": 22.95833396911621,
"blob_id": "1ce57fc27e2b6a18070cf58c52bc0c4468441153",
"content_id": "18ee2ed3f9f3ec5104d9f74eaed1827927662d71",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 575,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 24,
"path": "/coding/interviewbit/linkedList/removeDuplicat.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "/**\n * Definition for singly-linked list.\n * struct ListNode {\n * int val;\n * ListNode *next;\n * ListNode(int x) : val(x), next(NULL) {}\n * };\n */\nListNode* Solution::deleteDuplicates(ListNode* A) {\n ListNode *head = A;\n ListNode *runner;\n while(head){\n runner = head;\n if(runner->next && (runner->val == runner->next->val)){\n ListNode * temp = runner->next;\n runner->next = runner->next->next;\n temp->next = nullptr;\n } \n else {\n head = head->next;\n }\n }\n return A;\n}\n"
},
{
"alpha_fraction": 0.4316546618938446,
"alphanum_fraction": 0.4836745858192444,
"avg_line_length": 15.60377311706543,
"blob_id": "6ae33133e94b9528e321ee0d3cba89cf2a7117c3",
"content_id": "9e2f1df630daf01531653074cc76e21c694a0fe2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1807,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 106,
"path": "/coding/geeksforgeeks/sorting/selectionSort.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nvoid print(std::vector<int> A){\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcout << A[i] << \" \";\r\n\t}\r\n}\r\nint selectionSort(std::vector<int> A){\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tint minimum=A[i];\r\n\t\tint idx=i;\r\n\t\t//cout << A[i] << \" \";\r\n\t\tfor (int j = i+1; j < A.size(); ++j)\r\n\t\t{\r\n\t\t\t/* code */\r\n\t\t\tif(A[j]<minimum){\r\n\t\t\t\tminimum=A[j];\r\n\t\t\t\tidx=j;\r\n\t\t\t}\r\n\t\t}\r\n\t\t//cout << A[i] << \" \";\r\n\t\tif(idx!=i)\r\n\t\tswap(A[i],A[idx]);\r\n\t\t\r\n\t}\r\n\tprint(A);\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tstd::vector<int> A(n);\r\n\tcout << \"enter array\\n\";\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcin >> A[i];\r\n\t}\r\n\tcout << \"usorted array\\n\";\r\n\tprint(A);\r\n\tcout << \"\\nsorted array\\n\";\r\n\tselectionSort(A);\r\n\t//print(A);\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nvoid print(std::vector<int> A){\n\tfor (int i = 0; i < A.size(); ++i)\n\t{\n\t\t/* code */\n\t\tcout << A[i] << \" \";\n\t}\n}\nint selectionSort(std::vector<int> A){\n\tfor (int i = 0; i < A.size(); ++i)\n\t{\n\t\t/* code */\n\t\tint minimum=A[i];\n\t\tint idx=i;\n\t\t//cout << A[i] << \" \";\n\t\tfor (int j = i+1; j < A.size(); ++j)\n\t\t{\n\t\t\t/* code */\n\t\t\tif(A[j]<minimum){\n\t\t\t\tminimum=A[j];\n\t\t\t\tidx=j;\n\t\t\t}\n\t\t}\n\t\t//cout << A[i] << \" \";\n\t\tif(idx!=i)\n\t\tswap(A[i],A[idx]);\n\t\t\n\t}\n\tprint(A);\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tstd::vector<int> A(n);\n\tcout << \"enter array\\n\";\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tcin >> A[i];\n\t}\n\tcout << \"usorted array\\n\";\n\tprint(A);\n\tcout << \"\\nsorted 
array\\n\";\n\tselectionSort(A);\n\t//print(A);\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5663474798202515,
"alphanum_fraction": 0.5841313004493713,
"avg_line_length": 21.18181800842285,
"blob_id": "396aceec664a3df1916353bebcb11de265d8f980",
"content_id": "cfb3a06eb8dd798eba981ed9ea1942a6f80f01a6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 731,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 33,
"path": "/coding/interviewbit/array/pascal_triangle.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<vector>\nusing namespace std;\nstd::vector<std::vector<int> > generate(int num){\n\tstd::vector<std::vector<int> > result;\n\tstd::vector<int> v;\n\tv.push_back(1);\n\tresult.push_back(v);\n\tfor (int i = 2; i <= num; ++i){\n\t\tstd::vector<int> inner;\n\t\tinner.push_back(1);\n\t\tfor(int j=0;j<result[i-2].size()-1;j++){\n\t\t\tint num=result[i-2][j]+result[i-2][j+1];\n\t\t\tinner.push_back(num);\n\t\t}\n\t\tinner.push_back(1);\n\t\tresult.push_back(inner);\n\t}\n\treturn result;\n}\nint main(){\n\tint num;\n\tcin >> num;\n\tstd::vector<std::vector<int> > result=generate(num);\n\tfor (int i = 0; i < result.size(); ++i){\n\t\t/* code */\n\t\tfor (int j = 0; j < result[i].size(); ++j){\n\t\t\tcout << result[i][j] << \" \";\n\t\t}\n\t\tcout << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4841628968715668,
"alphanum_fraction": 0.49321267008781433,
"avg_line_length": 21.100000381469727,
"blob_id": "7c64495a919397d40a1fbe73b65d5e0051629d35",
"content_id": "9542511abb0dfacdf81117710177b1681967d93d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 663,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 30,
"path": "/coding/interviewbit/linkedList/rotateList.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "/**\n * Definition for singly-linked list.\n * struct ListNode {\n * int val;\n * ListNode *next;\n * ListNode(int x) : val(x), next(NULL) {}\n * };\n */\nListNode* Solution::rotateRight(ListNode* A, int B) {\n ListNode *runner = A, *prev;\n int count = 0;\n while(runner){\n count++;\n prev = runner;\n runner = runner->next;\n }\n if(count == 0 || count == 1 || B == 0 || count == B)\n return A;\n B %= count;\n B = count - B - 1;\n runner = A;\n while(B > 0){\n runner = runner ->next;\n B--;\n }\n ListNode *temp = runner->next;\n runner->next = nullptr;\n prev->next = A;\n return temp;\n}\n"
},
{
"alpha_fraction": 0.5170691013336182,
"alphanum_fraction": 0.5253955125808716,
"avg_line_length": 17.40322494506836,
"blob_id": "5daa796019941a77ce52f5b24fc00a2b7e6e7fb2",
"content_id": "08824b6bc54253deb6e6d90fc9927b8d77572326",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1201,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 62,
"path": "/coding/interviewbit/hash/equal.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nstruct Form\r\n{\r\n\tint i,j;\r\n\tint sum;\r\n};\r\ntypedef struct Form form;\r\nstd::vector<int> solution(std::vector<int> A){\r\n\tstd::vector<int> result;\r\n\tint n=(A.size()*(A.size()-1))/2;\r\n\tvector<form> temp(n);\r\n\tif(A.size()<4)\r\n\t\treturn result;\r\n\tint k=0;\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\tfor(int j=i+1;j<A.size();j++){\r\n\t\t\ttemp[k].sum=A[i]+A[j];\r\n\t\t\ttemp[k].i=i;\r\n\t\t\ttemp[k].j=j;\r\n\t\t\tk++;\r\n\t\t}\r\n\t}\r\n\tmap<int,form> mp;\r\n\tmap<int ,form> ::iterator it;\r\n\tfor (int i = 0; i < temp.size(); ++i){\r\n\t\tint key=temp[i].sum;\r\n\t\tit=mp.find(key);\r\n\t\tif(it!=mp.end()){\r\n\t\t\tint first=it->second.i;\r\n\t\t\tint second=it->second.j;\r\n\t\t\tint third=temp[i].i;\r\n\t\t\tint fourth=temp[i].j;\r\n\t\t\tresult.push_back(first);\r\n\t\t\tresult.push_back(second);\r\n\t\t\tresult.push_back(third);\r\n\t\t\tresult.push_back(fourth);\r\n\t\t\treturn result;\r\n\t\t}\r\n\t\tmp.insert(pair<int,form>(key,temp[i]));\r\n\t}\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tstd::vector<int> A(n);\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcin >> A[i];\r\n\t}\r\n\tstd::vector<int> v=solution(A);\r\n\tfor (int i = 0; i < v.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcout << v[i] << \" \";\r\n\t}\r\n\tcout << endl;\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.4076704680919647,
"alphanum_fraction": 0.49715909361839294,
"avg_line_length": 25.11111068725586,
"blob_id": "fefa7f684bfa1cd27b110106585e69cb8187b925",
"content_id": "effc82d20ec0831e1140db09448c1f3ea1119f26",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 704,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 27,
"path": "/coding/codechef/communication.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nint main(){\n\tint n,i,j;\n\tscanf(\"%d\", &n);\n\twhile(n--){\n\t\tint distance,cor[3][2],count=0;\n\t\tscanf(\"%d\", &distance);\n\t\tdistance=distance*distance;\n\t\tfor(i=0;i<3;i++)\n\t\t\tfor(j=0;j<2;j++)\n\t\t\t\tscanf(\"%d\", &cor[i][j]);\n\t\tint d1=((cor[0][0]-cor[1][0])*(cor[0][0]-cor[1][0]))+((cor[0][1]-cor[1][1])*(cor[0][1]-cor[1][1]));\n\t\tint d2=((cor[0][0]-cor[2][0])*(cor[0][0]-cor[2][0]))+((cor[0][1]-cor[2][1])*(cor[0][1]-cor[2][1]));\n\t\tint d3=((cor[2][0]-cor[1][0])*(cor[2][0]-cor[1][0]))+((cor[2][1]-cor[1][1])*(cor[2][1]-cor[1][1]));\n\t\tif(d1>distance)\n\t\t\tcount++;\n\t\tif(d2>distance)\n\t\t\tcount++;\n\t\tif (d3>distance)\n\t\t\tcount++;\n\t\tif(count>=2)\n\t\t\tprintf(\"no\\n\");\n\t\telse\n\t\t\tprintf(\"yes\\n\");\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5033039450645447,
"alphanum_fraction": 0.5671806335449219,
"avg_line_length": 16.019229888916016,
"blob_id": "28b72732eb01bd8a224eb2cae6c7f70a8ec3136a",
"content_id": "ddaa334da52fe31fb69195596e580388e461fa52",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1816,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 104,
"path": "/coding/interviewbit/string/longestPalindrome.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nstring palindrome(string str){\r\n\tif(str.size()==0)\r\n\t\treturn \"\";\r\n\tint len=str.size();\r\n\tint start=0;\r\n\tint length=1;\r\n\tint i,low,high;\r\n\tfor (i=1;i<len;i++)\r\n\t{\r\n\t\tlow=i-1;\r\n\t\thigh=i;\r\n\t\twhile(low>=0&&high<len&&(str[low]==str[high])){\r\n\t\t\tif(length<high-low+1){\r\n\t\t\t\tstart=low;\r\n\t\t\t\tlength=high-low+1;\r\n\t\t\t}\r\n\t\t\tlow--;\r\n\t\t\thigh++;\r\n\t\t}\r\n\r\n\t\tlow=i-1;\r\n\t\thigh=i+1;\r\n\t\twhile(low>=0&&high<len&&(str[low]==str[high])){\r\n\t\t\tif(length<high-low+1){\r\n\t\t\t\tstart=low;\r\n\t\t\t\tlength=high-low+1;\r\n\t\t\t}\r\n\t\t\tlow--;\r\n\t\t\thigh++;\r\n\t\t}\r\n\t}\t\r\n\tstring result=\"\";\r\n\tfor(int i=0;i<length;i++)\r\n\t\tresult=result+string(1,str[start+i]);\r\n\treturn result;\r\n}\r\nint main(){\r\n\tint n;\r\n\t//cin >> n;\t\r\n\tstring str;\r\n\tcin >> str;\r\n\tstring res=palindrome(str);\r\n\tcout << res<<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nstring palindrome(string str){\n\tif(str.size()==0)\n\t\treturn \"\";\n\tint len=str.size();\n\tint start=0;\n\tint length=1;\n\tint i,low,high;\n\tfor (i=1;i<len;i++)\n\t{\n\t\tlow=i-1;\n\t\thigh=i;\n\t\twhile(low>=0&&high<len&&(str[low]==str[high])){\n\t\t\tif(length<high-low+1){\n\t\t\t\tstart=low;\n\t\t\t\tlength=high-low+1;\n\t\t\t}\n\t\t\tlow--;\n\t\t\thigh++;\n\t\t}\n\n\t\tlow=i-1;\n\t\thigh=i+1;\n\t\twhile(low>=0&&high<len&&(str[low]==str[high])){\n\t\t\tif(length<high-low+1){\n\t\t\t\tstart=low;\n\t\t\t\tlength=high-low+1;\n\t\t\t}\n\t\t\tlow--;\n\t\t\thigh++;\n\t\t}\n\t}\t\n\tstring result=\"\";\n\tfor(int i=0;i<length;i++)\n\t\tresult=result+string(1,str[start+i]);\n\treturn result;\n}\nint main(){\n\tint n;\n\t//cin >> n;\t\n\tstring str;\n\tcin >> str;\n\tstring res=palindrome(str);\n\tcout << 
res<<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5295023322105408,
"alphanum_fraction": 0.5818368196487427,
"avg_line_length": 20.22222137451172,
"blob_id": "c8c29716f76f1f9de1d7b312a4ae6a3ea3067af8",
"content_id": "902e6f24e8d2d235dbab11a3e301bed1791e1b91",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1949,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 90,
"path": "/coding/interviewbit/backtracking/generateSubset.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nvoid func(vector<int> &A,vector<int> &cur,vector<vector<int> > &ret,int k){\r\n\tif(k==A.size()){\r\n\t\tret.push_back(cur);\r\n\t\treturn ;\r\n\t}\r\n\tfunc(A,cur,ret,k+1);\r\n\tcur.push_back(A[k]);\r\n\tfunc(A,cur,ret,k+1);\r\n\tcur.pop_back();\r\n}\r\nbool compare(int a,int b){\r\n\treturn a<b;\r\n}\r\nvector<vector<int> > evalute(std::vector<int>A){\r\n\tvector<vector<int> > ret;\r\n\tsort(A.begin(),A.end(),compare);\r\n\tvector<int> cur;\r\n\tfunc(A,cur,ret,0);\r\n\tsort(ret.begin(),ret.end());\r\n\tfor(int i=1;i<ret.size();i++)\r\n\t\tif(ret[i]==ret[i-1])\r\n\t\t\tret.erase(ret.begin()+i);\r\n\treturn ret;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tvector<int> A(n);\r\n\tfor(int i=0;i<n;i++)\r\n\t\tcin >> A[i];\r\n\tvector<vector<int> > ret=evalute(A);\r\n\tfor(int i=0;i<ret.size();i++){\r\n\t\tfor(int j=0;j<ret[i].size();j++)\r\n\t\t\tcout << ret[i][j] << \" \";\r\n\t\tcout << endl;\r\n\t}\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nvoid func(vector<int> &A,vector<int> &cur,vector<vector<int> > &ret,int k){\n\tif(k==A.size()){\n\t\tret.push_back(cur);\n\t\treturn ;\n\t}\n\tfunc(A,cur,ret,k+1);\n\tcur.push_back(A[k]);\n\tfunc(A,cur,ret,k+1);\n\tcur.pop_back();\n}\nbool compare(int a,int b){\n\treturn a<b;\n}\nvector<vector<int> > evalute(std::vector<int>A){\n\tvector<vector<int> > ret;\n\tsort(A.begin(),A.end(),compare);\n\tvector<int> cur;\n\tfunc(A,cur,ret,0);\n\tsort(ret.begin(),ret.end());\n\tfor(int i=1;i<ret.size();i++)\n\t\tif(ret[i]==ret[i-1])\n\t\t\tret.erase(ret.begin()+i);\n\treturn ret;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tvector<vector<int> > ret=evalute(A);\n\tfor(int i=0;i<ret.size();i++){\n\t\tfor(int j=0;j<ret[i].size();j++)\n\t\t\tcout << 
ret[i][j] << \" \";\n\t\tcout << endl;\n\t}\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.4830188751220703,
"alphanum_fraction": 0.5320754647254944,
"avg_line_length": 11.666666984558105,
"blob_id": "c4073e3580f1bf55b5492b881f4c65c3859f8a3c",
"content_id": "fa53a9d39006a86736ed24ad5572158496ed20c1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 265,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 21,
"path": "/coding/codechef/CIELRCPT.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<cstdlib>\nusing namespace std;\nint main(){\n\tint t;\n\tcin >> t;\n\twhile(t--){\n\t\tint p;\n\t\tcin >> p;\n\t\tint count=p/2048;\n\t\tp=p%2048;\n\t\twhile(p!=0){\n\t\t\tint rm=p%2;\n\t\t\tp=p/2;\n\t\t\tif(rm==1)\n\t\t\t\tcount++;\n\t\t}\n\t\tcout << count << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.38857144117355347,
"alphanum_fraction": 0.45257142186164856,
"avg_line_length": 13.637930870056152,
"blob_id": "6b64820fe2b33828c85cd0411381f13f98d70263",
"content_id": "14844d1d59cb84d97a8b9a8d633a0dc2100fec99",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1750,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 116,
"path": "/coding/interviewbit/DP/regularExpression.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nbool match(char *s,char *p){\r\n\tchar *star=NULL;\r\n\tchar *ss=s;\r\n\twhile(*s){\r\n\t\tif((*p=='?')||(*p==*s)){\r\n\t\t\tp++;\r\n\t\t\ts++;\r\n\t\t\tcontinue;\r\n\t\t}\r\n\t\tif(*p=='*'){\r\n\t\t\tstar=p++;\r\n\t\t\tss=s;\r\n\t\t\tcontinue;\r\n\t\t}\r\n\t\tif(star){\r\n\t\t\tp=star+1;\r\n\t\t\ts=++ss;\r\n\t\t\tcontinue;\r\n\t\t}\r\n\t\treturn false;\r\n\t}\r\n\twhile(*p!='\\0'&&*p=='*')\r\n\t\tp++;\r\n\tif(*p=='\\0')\r\n\t\treturn true;\r\n\treturn false;\r\n}\r\nint main(){\r\n\t//int n;\r\n\t//cin >> n;\t\r\n\tchar s[100],p[100];\r\n\tint n,m;\r\n\tcin >> n >> m;\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcin >> s[i];\r\n\t}\r\n\ts[n]='\\0';\r\n\tfor (int i = 0; i < m; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcin >> p[i];\r\n\t}\r\n\tp[m]='\\0';\r\n\t//cin >> s >> p;\r\n\t//cout << s <<endl << p << endl;\r\n\tbool flag=match(s,p);\r\n\tcout << flag <<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nbool match(char *s,char *p){\n\tchar *star=NULL;\n\tchar *ss=s;\n\twhile(*s){\n\t\tif((*p=='?')||(*p==*s)){\n\t\t\tp++;\n\t\t\ts++;\n\t\t\tcontinue;\n\t\t}\n\t\tif(*p=='*'){\n\t\t\tstar=p++;\n\t\t\tss=s;\n\t\t\tcontinue;\n\t\t}\n\t\tif(star){\n\t\t\tp=star+1;\n\t\t\ts=++ss;\n\t\t\tcontinue;\n\t\t}\n\t\treturn false;\n\t}\n\twhile(*p!='\\0'&&*p=='*')\n\t\tp++;\n\tif(*p=='\\0')\n\t\treturn true;\n\treturn false;\n}\nint main(){\n\t//int n;\n\t//cin >> n;\t\n\tchar s[100],p[100];\n\tint n,m;\n\tcin >> n >> m;\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tcin >> s[i];\n\t}\n\ts[n]='\\0';\n\tfor (int i = 0; i < m; ++i)\n\t{\n\t\t/* code */\n\t\tcin >> p[i];\n\t}\n\tp[m]='\\0';\n\t//cin >> s >> p;\n\t//cout << s <<endl << p << endl;\n\tbool flag=match(s,p);\n\tcout << flag <<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 
5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5625234842300415,
"alphanum_fraction": 0.6000751256942749,
"avg_line_length": 16.78082275390625,
"blob_id": "a0eedc839de6c70ad4f46b481a0719a2092a570f",
"content_id": "04dd28c58f9a89dcee8e088c8b9110add7db3647",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2663,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 146,
"path": "/coding/geeksforgeeks/build_tree.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\n\r\nstruct Node\r\n{\r\n\tint key;\r\n\tNode *left,*right;\r\n};\r\ntypedef struct Node node;\r\n\r\nnode *newnode(int key){\r\n\tnode *temp=(node *)malloc(sizeof(node));\r\n\ttemp->key=key;\r\n\ttemp->left=NULL;\r\n\ttemp->right=NULL;\r\n\treturn temp;\r\n}\r\nint search(vector<int> in,int start,int end,int key){\r\n\tint i;\r\n\tfor (i = start; i <= end; ++i)\r\n\t{\r\n\t\r\n\t\tif(in[i]==key)\r\n\t\t\treturn i;\r\n\t}\r\n\treturn i;\r\n}\r\nnode *tree(vector<int> in,vector<int> pos,int start,int end,int *idx){\r\n\tif(start>end)\r\n\t\treturn NULL;\r\n\tnode *root=newnode(pos[*idx]);\r\n\t(*idx)--;\r\n\tif (start==end)\r\n\t\treturn root;\r\n\tint index=search(in,start,end,root->key);\r\n\troot->right=tree(in,pos,index+1,end,idx);\r\n\troot->left=tree(in,pos,start,index-1,idx);\r\n\treturn root;\r\n}\r\nnode *buildTree(vector<int> in,vector<int> pos,int n){\r\n\tint idx=n-1;\r\n\treturn tree(in,pos,0,n-1,&idx);\r\n}\r\nvoid print(node *root){\r\n\tif(root==NULL)\r\n\t\treturn;\r\n\tcout << root->key << \" \";\r\n\tprint(root->left);\r\n\tprint(root->right);\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >>n;\r\n\tstd::vector<int> in(n),pos(n);\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\r\n\t\tcin >> in[i];\r\n\t}\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\r\n\t\tcin >> pos[i];\r\n\t}\r\n\tnode *root=buildTree(in,pos,n);\r\n\tprint(root);\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\n\nstruct Node\n{\n\tint key;\n\tNode *left,*right;\n};\ntypedef struct Node node;\n\nnode *newnode(int key){\n\tnode *temp=(node *)malloc(sizeof(node));\n\ttemp->key=key;\n\ttemp->left=NULL;\n\ttemp->right=NULL;\n\treturn temp;\n}\nint search(vector<int> in,int start,int end,int key){\n\tint i;\n\tfor (i = start; i <= end; ++i)\n\t{\n\t\n\t\tif(in[i]==key)\n\t\t\treturn 
i;\n\t}\n\treturn i;\n}\nnode *tree(vector<int> in,vector<int> pos,int start,int end,int *idx){\n\tif(start>end)\n\t\treturn NULL;\n\tnode *root=newnode(pos[*idx]);\n\t(*idx)--;\n\tif (start==end)\n\t\treturn root;\n\tint index=search(in,start,end,root->key);\n\troot->right=tree(in,pos,index+1,end,idx);\n\troot->left=tree(in,pos,start,index-1,idx);\n\treturn root;\n}\nnode *buildTree(vector<int> in,vector<int> pos,int n){\n\tint idx=n-1;\n\treturn tree(in,pos,0,n-1,&idx);\n}\nvoid print(node *root){\n\tif(root==NULL)\n\t\treturn;\n\tcout << root->key << \" \";\n\tprint(root->left);\n\tprint(root->right);\n}\nint main(){\n\tint n;\n\tcin >>n;\n\tstd::vector<int> in(n),pos(n);\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\n\t\tcin >> in[i];\n\t}\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\n\t\tcin >> pos[i];\n\t}\n\tnode *root=buildTree(in,pos,n);\n\tprint(root);\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.4894651472568512,
"alphanum_fraction": 0.5705024600028992,
"avg_line_length": 17.875,
"blob_id": "241a44f410ca7439075a9e1c9c9434ee9b39f932",
"content_id": "837628be3b1abc57cde8a8790e73defe8a96c412",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1234,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 64,
"path": "/coding/interviewbit/DP/infiniteCoinSum.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint calculate(std::vector<int> A,int sum){\r\n\tstd::vector<int> table(sum+1,0);\r\n\t//memset(table,0,sizeof(table));\r\n\ttable[0]=1;\r\n\tfor(int i=0;i<A.size();i++)\r\n\t\tfor(int j=A[i];j<=sum;j++)\r\n\t\t\ttable[j]+=table[j-A[i]];\r\n\treturn table[sum];\r\n\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tstd::vector<int> A(n);\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcin >> A[i];\r\n\t}\r\n\tint sum;\r\n\tcin >> sum;\r\n\tint sol=calculate(A,sum);\r\n\tcout << sol <<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint calculate(std::vector<int> A,int sum){\n\tstd::vector<int> table(sum+1,0);\n\t//memset(table,0,sizeof(table));\n\ttable[0]=1;\n\tfor(int i=0;i<A.size();i++)\n\t\tfor(int j=A[i];j<=sum;j++)\n\t\t\ttable[j]+=table[j-A[i]];\n\treturn table[sum];\n\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tstd::vector<int> A(n);\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tcin >> A[i];\n\t}\n\tint sum;\n\tcin >> sum;\n\tint sol=calculate(A,sum);\n\tcout << sol <<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5059062838554382,
"alphanum_fraction": 0.5490835309028625,
"avg_line_length": 19.355932235717773,
"blob_id": "1ba02d8f259da5e1894422348d9060813887c946",
"content_id": "2e5fd3cfe780ae9c5b2ac4a3a96850a84ffb6b9b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2455,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 118,
"path": "/coding/geeksforgeeks/anagram_generate.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nvoid print(std::vector<string> A){\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcout << A[i] << \" \";\r\n\t}\r\n\tcout <<endl;\r\n}\r\nstd::vector<vector<int> > evalute(std::vector<string> A){\r\n\tstd::map<string, int> m;\r\n\tstd::map<string, int>::iterator it;\r\n\tstd::vector<vector<int> > result;\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tstring temp=A[i];\r\n\t\tsort(temp.begin(),temp.end());\r\n\t\t//cout << temp <<endl;\r\n\t\tit=m.find(temp);\r\n\t\tstd::vector<int> v;\r\n\t\tint flag=0;\r\n\t\tif(it!=m.end()){\r\n\t\t\t//cout << temp << \" \"<<it->first <<endl;\r\n\t\t\tv.push_back(it->second+1);\r\n\t\t\tv.push_back(i+1);\r\n\t\t\tflag=1;\r\n\t\t\tresult.push_back(v);\r\n\t\t}\r\n\t\tif(flag==0)\r\n\t\t\tm.insert(pair<string,int>(temp,i));\r\n\t}\r\n\treturn result;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tstd::vector<string> A;\r\n\twhile(n--){\r\n\t\tstring str;\r\n\t\tcin >>str;\r\n\t\tA.push_back(str);\r\n\t}\t\r\n\t//print(A);\r\n\tstd::vector<vector<int> > result=evalute(A);\r\n\tsort(result.begin(),result.end());\r\n\tfor (int i = 0; i < result.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcout << result[i][0]<<\" \"<<result[i][1]<<endl;\r\n\t}\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nvoid print(std::vector<string> A){\n\tfor (int i = 0; i < A.size(); ++i)\n\t{\n\t\t/* code */\n\t\tcout << A[i] << \" \";\n\t}\n\tcout <<endl;\n}\nstd::vector<vector<int> > evalute(std::vector<string> A){\n\tstd::map<string, int> m;\n\tstd::map<string, int>::iterator it;\n\tstd::vector<vector<int> > result;\n\tfor (int i = 0; i < A.size(); ++i)\n\t{\n\t\t/* code */\n\t\tstring temp=A[i];\n\t\tsort(temp.begin(),temp.end());\n\t\t//cout << temp 
<<endl;\n\t\tit=m.find(temp);\n\t\tstd::vector<int> v;\n\t\tint flag=0;\n\t\tif(it!=m.end()){\n\t\t\t//cout << temp << \" \"<<it->first <<endl;\n\t\t\tv.push_back(it->second+1);\n\t\t\tv.push_back(i+1);\n\t\t\tflag=1;\n\t\t\tresult.push_back(v);\n\t\t}\n\t\tif(flag==0)\n\t\t\tm.insert(pair<string,int>(temp,i));\n\t}\n\treturn result;\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tstd::vector<string> A;\n\twhile(n--){\n\t\tstring str;\n\t\tcin >>str;\n\t\tA.push_back(str);\n\t}\t\n\t//print(A);\n\tstd::vector<vector<int> > result=evalute(A);\n\tsort(result.begin(),result.end());\n\tfor (int i = 0; i < result.size(); ++i)\n\t{\n\t\t/* code */\n\t\tcout << result[i][0]<<\" \"<<result[i][1]<<endl;\n\t}\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.46356695890426636,
"alphanum_fraction": 0.5357390642166138,
"avg_line_length": 15.325581550598145,
"blob_id": "6ce8ad2f9dedf20572d3726310935a5976c223f0",
"content_id": "d4defa21264d8406a6f0b12bd9b36a7c2a376919",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1441,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 86,
"path": "/coding/geeksforgeeks/sorting/rotatedArraySearch.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint search(std::vector<int> A,int key){\r\n\tint low=0,high=A.size()-1;\r\n\twhile(low<=high){\r\n\t\tint mid=(low+high)/2;\r\n\t\tif(A[mid]==key)\r\n\t\t\treturn mid;\r\n\t\tif(A[low]<=A[mid]){\r\n\t\t\tif(key>=A[low]&&key<=A[mid])\r\n\t\t\t\thigh=mid-1;\r\n\t\t\telse\r\n\t\t\t\tlow=mid+1;\r\n\t\t}\r\n\t\telse{\r\n\t\t\tif(key>=A[mid]&&key<=A[high])\r\n\t\t\t\tlow=mid+1;\r\n\t\t\telse\r\n\t\t\t\thigh=mid-1;\r\n\t\t}\r\n\t}\r\n\treturn -1;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tstd::vector<int> A(n);\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcin >> A[i];\r\n\t}\r\n\tint key;\r\n\tcin >> key;\r\n\tint res=search(A,key);\r\n\tcout << res;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint search(std::vector<int> A,int key){\n\tint low=0,high=A.size()-1;\n\twhile(low<=high){\n\t\tint mid=(low+high)/2;\n\t\tif(A[mid]==key)\n\t\t\treturn mid;\n\t\tif(A[low]<=A[mid]){\n\t\t\tif(key>=A[low]&&key<=A[mid])\n\t\t\t\thigh=mid-1;\n\t\t\telse\n\t\t\t\tlow=mid+1;\n\t\t}\n\t\telse{\n\t\t\tif(key>=A[mid]&&key<=A[high])\n\t\t\t\tlow=mid+1;\n\t\t\telse\n\t\t\t\thigh=mid-1;\n\t\t}\n\t}\n\treturn -1;\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tstd::vector<int> A(n);\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tcin >> A[i];\n\t}\n\tint key;\n\tcin >> key;\n\tint res=search(A,key);\n\tcout << res;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.40551888942718506,
"alphanum_fraction": 0.48710256814956665,
"avg_line_length": 17.953489303588867,
"blob_id": "ca4c716c1dff93541c19301ac987f8da1ab921c7",
"content_id": "2caa5f5a88a80c61e8d6223d0e6aba64451aae7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1667,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 86,
"path": "/coding/interviewbit/DP/minSumMatrix.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint health(std::vector<std::vector<int> > A ){\r\n\tint n=A.size();\r\n\tif(n==0)\r\n\t\treturn 0;\r\n\tint m=A[0].size();\r\n\tfor(int i=m-2;i>=0;i--){\r\n\t\tA[n-1][i]=A[n-1][i+1]+A[n-1][i];\r\n\t}\r\n\tfor(int i=n-2;i>=0;i--)\r\n\t\tA[i][m-1]=A[i][m-1]+A[i+1][m-1];\r\n\tfor(int i=n-2;i>=0;i--){\r\n\t\tfor(int j=m-2;j>=0;j--){\r\n\t\t\tA[i][j]=A[i][j]+min(A[i+1][j],A[i][j+1]);\r\n\t\t}\r\n\t}\r\n\treturn A[0][0];\r\n}\r\nint main(){\r\n\tint n,m;\r\n\tcin >> n;\r\n\tcin >> m;\t\r\n\tstd::vector<std::vector<int> > A(n,std::vector<int>(m));\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tfor (int j = 0; j < m; ++j)\r\n\t\t{\r\n\t\t\t/* code */\r\n\t\t\tcin >> A[i][j];\r\n\t\t}\r\n\t}\r\n\tcout << endl ;\r\n\tint sol=health(A);\r\n\tcout << sol << endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint health(std::vector<std::vector<int> > A ){\n\tint n=A.size();\n\tif(n==0)\n\t\treturn 0;\n\tint m=A[0].size();\n\tfor(int i=m-2;i>=0;i--){\n\t\tA[n-1][i]=A[n-1][i+1]+A[n-1][i];\n\t}\n\tfor(int i=n-2;i>=0;i--)\n\t\tA[i][m-1]=A[i][m-1]+A[i+1][m-1];\n\tfor(int i=n-2;i>=0;i--){\n\t\tfor(int j=m-2;j>=0;j--){\n\t\t\tA[i][j]=A[i][j]+min(A[i+1][j],A[i][j+1]);\n\t\t}\n\t}\n\treturn A[0][0];\n}\nint main(){\n\tint n,m;\n\tcin >> n;\n\tcin >> m;\t\n\tstd::vector<std::vector<int> > A(n,std::vector<int>(m));\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tfor (int j = 0; j < m; ++j)\n\t\t{\n\t\t\t/* code */\n\t\t\tcin >> A[i][j];\n\t\t}\n\t}\n\tcout << endl ;\n\tint sol=health(A);\n\tcout << sol << endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.49861112236976624,
"alphanum_fraction": 0.5152778029441833,
"avg_line_length": 16.975000381469727,
"blob_id": "381c43a2b67365531a840ff1a58c8c78e05e0fb7",
"content_id": "439278981f51b054d7c65e40fe08dac4d01911e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 720,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 40,
"path": "/coding/interview_maxnonnegative_subarray.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<cstdlib>\nusing namespace std;\nint main(){\n\tint array[15],i,len;\n cin >> len;\n\tfor (i = 0; i <len; ++i)\n\t{\n\t\tcin >> array[i];\n\t}\n\tlong long unsigned int sum=0,current_sum=0,index_l=0,index_h=0,idx_x,idx_y;\n\tfor(i=0;i<len;i++){\n\t\tif(array[i]>=0){\n\t\t\tcurrent_sum+=array[i];\n\t\t\tindex_h=i;\n\t\t\tif (current_sum>=sum)\n\t\t\t{\n\t\t\t\t/* code */\n\t\t\t\tsum=current_sum;\n\t\t\t\tidx_x=index_l;\n\t\t\t\tidx_y=index_h;\n\t\t\t}\n\t\t\telse if (sum==current_sum)\n\t\t\t{\n\t\t\t\t/* code */\n\t\t\t\tint diff=(index_h-index_l)>(idx_y-idx_x);\n\t\t\t\tif(diff>0){\n\t\t\t\t\tsum=current_sum;\n\t\t\t\t\tidx_y=index_h;\n\t\t\t\t\tidx_x=index_l;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\telse{\n\t\t\tcurrent_sum=0;\n\t\t\tindex_l=i+1;\n\t\t}\n\t}\n\tcout << sum << \" \" << idx_x << \" \"<< idx_y << \"\\n\";\n}\n\t"
},
{
"alpha_fraction": 0.482674777507782,
"alphanum_fraction": 0.5483282804489136,
"avg_line_length": 16.844444274902344,
"blob_id": "d76c0520bf240053b0c1616dcfaf2d37219b6c1a",
"content_id": "874dc77b6700e7aa5caea7360958b7cd2536c8cd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1645,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 90,
"path": "/coding/interviewbit/array/findPermutation.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nvoid print(std::vector<int> A){\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\tcout << A[i] << \" \";\r\n\t}\r\n\tcout << endl;\r\n}\r\nstd::vector<int> findPermutation(int n,string str){\r\n\tint max=1,min=1;\r\n\tstd::vector<int> result(1,1);\r\n\tfor(int i=0;i<str.size();i++){\r\n\t\tif(str[i]=='D'){\r\n\t\t\tresult.push_back(min-1);\r\n\t\t\tmin--;\r\n\t\t}\r\n\t\telse if(str[i]=='I'){\r\n\t\t\tresult.push_back(max+1);\r\n\t\t\tmax++;\r\n\t\t}\r\n\t}\r\n\tif(min<=0){\r\n\t\tfor (int i = 0; i < result.size(); ++i)\r\n\t\t{\r\n\t\t\t/* code */\r\n\t\t\tresult[i]=result[i]+abs(min)+1;\r\n\t\t}\r\n\t}\r\n\treturn result;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tstring str;\r\n\tcin >>str;\t\r\n\tstd::vector<int> A=findPermutation(n,str);\r\n\tprint(A);\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nvoid print(std::vector<int> A){\n\tfor (int i = 0; i < A.size(); ++i)\n\t{\n\t\tcout << A[i] << \" \";\n\t}\n\tcout << endl;\n}\nstd::vector<int> findPermutation(int n,string str){\n\tint max=1,min=1;\n\tstd::vector<int> result(1,1);\n\tfor(int i=0;i<str.size();i++){\n\t\tif(str[i]=='D'){\n\t\t\tresult.push_back(min-1);\n\t\t\tmin--;\n\t\t}\n\t\telse if(str[i]=='I'){\n\t\t\tresult.push_back(max+1);\n\t\t\tmax++;\n\t\t}\n\t}\n\tif(min<=0){\n\t\tfor (int i = 0; i < result.size(); ++i)\n\t\t{\n\t\t\t/* code */\n\t\t\tresult[i]=result[i]+abs(min)+1;\n\t\t}\n\t}\n\treturn result;\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tstring str;\n\tcin >>str;\t\n\tstd::vector<int> A=findPermutation(n,str);\n\tprint(A);\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.4794238805770874,
"alphanum_fraction": 0.5020576119422913,
"avg_line_length": 15.793103218078613,
"blob_id": "24421ffc06f7c3817bffacf1d7271b5be281d2a2",
"content_id": "faac303e0a7ee23d95f0c6f863ae4b655ac84acb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 486,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 29,
"path": "/coding/codechef/SUBINC.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <vector>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t,n;\n\tcin >> t;\n\tstd::vector<int> num,sum;\n\twhile(t--){\n\t\tcin >> n;\n\t\tnum.resize(n);\n\t\tsum.resize(n);\n\t\tsum[0]=1;\n\t\tfor (int i = 0; i < n; ++i){\n\t\t\tcin >> num[i];\n\t\t}\n\t\tfor (int i = 1; i < n; ++i){\n\t\t\tif(num[i-1]<=num[i])\n\t\t\t\tsum[i]=sum[i-1]+1;\n\t\t\telse\n\t\t\t\tsum[i]=1;\n\t\t}\n\t\tlong long int res=0;\n\t\tfor (int i = 0; i < n; ++i){\n\t\t\tres+=sum[i];\n\t\t}\n\t\tcout << res <<endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5160829424858093,
"alphanum_fraction": 0.5889921188354492,
"avg_line_length": 19.787878036499023,
"blob_id": "1a629c21f5afc12f668ae3a31054b2ac65c185f8",
"content_id": "98ab28fe804e40444c05f54a7f671f74fee3c32f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1399,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 66,
"path": "/coding/interviewbit/DP/maxProduct.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint maxProduct(std::vector<int> A){\r\n\tif(A.empty())\r\n\t\treturn 0;\r\n\tif(A.size()==1)\r\n\t\treturn A[0];\r\n\tint ans=A[0];\r\n\tint prev_max=A[0],prev_min=A[0];\r\n\tfor(int i=1;i<A.size();i++){\r\n\t\tint cur_max=max(max(prev_max*A[i],prev_min*A[i]),A[i]);\r\n\t\tint cur_min=min(min(prev_max*A[i],prev_min*A[i]),A[i]);\r\n\t\tans=max(ans,cur_max);\r\n\t\tprev_max=cur_max;\r\n\t\tprev_min=cur_min;\r\n\t}\r\n\treturn ans;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tvector<int> A(n);\r\n\tfor(int i=0;i<n;i++)\r\n\t\tcin >> A[i];\r\n\tint sol=maxProduct(A);\r\n\tcout << sol <<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint maxProduct(std::vector<int> A){\n\tif(A.empty())\n\t\treturn 0;\n\tif(A.size()==1)\n\t\treturn A[0];\n\tint ans=A[0];\n\tint prev_max=A[0],prev_min=A[0];\n\tfor(int i=1;i<A.size();i++){\n\t\tint cur_max=max(max(prev_max*A[i],prev_min*A[i]),A[i]);\n\t\tint cur_min=min(min(prev_max*A[i],prev_min*A[i]),A[i]);\n\t\tans=max(ans,cur_max);\n\t\tprev_max=cur_max;\n\t\tprev_min=cur_min;\n\t}\n\treturn ans;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tint sol=maxProduct(A);\n\tcout << sol <<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5289255976676941,
"alphanum_fraction": 0.5336481928825378,
"avg_line_length": 18.720930099487305,
"blob_id": "40dd38b9c5c9147f4b27349613d59606aaa8483b",
"content_id": "7400e70b7d7d11c6cb6e0bb5ca5b3aecd74a8daa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 847,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 43,
"path": "/coding/interviewbit/stack & queue/windowSize.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nstd::vector<int> window(std::vector<int> A,int k){\n\tdeque<int> w(k);\n\tstd::vector<int> result;\n\tif(A.size()<k)\n\t\treturn result;\n\tint i;\n\tfor(i=0;i<k;i++){\n\t\twhile((!w.empty())&&A[i]>=A[w.back()])\n\t\t\tw.pop_back();\n\t\tw.push_back(i);\n\t}\n\tfor(;i<A.size();i++){\n\t\tresult.push_back(A[w.front()]);\n\t\t//cout << A[w.front()] << \" \";\n\t\twhile((!w.empty())&&w.front()<=(i-k))\n\t\t\tw.pop_front();\n\t\twhile((!w.empty())&&A[i]>=A[w.back()])\n\t\t\tw.pop_back();\n\t\tw.push_back(i);\n\t}\n\tresult.push_back(A[w.front()]);\n\treturn result;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tint k;\n\tcin >> k;\n\tstd::vector<int> result=window(A,k);\n\t//cout << result.size();\n\tfor (int i = 0; i < result.size(); ++i)\n\t{\n\t\t/* code */\n\t\tcout << result[i] << \" \";\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.41098901629447937,
"alphanum_fraction": 0.46593406796455383,
"avg_line_length": 15.88888931274414,
"blob_id": "776dbd62aa4d43575079e876097425a2100a623e",
"content_id": "a89c3f0ee40a88316ed6e7205e29e43abf1909d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 455,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 27,
"path": "/coding/codechef/HEADBOB.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\nusing namespace std;\nint main(){\n\tint t;\n\tcin >> t;\n\twhile(t--){\n\t\tint n,f1=0,f2=0;\n\t\tcin >> n;\n\t\twhile(n--){\n\t\t\tchar ch;\n\t\t\tcin >> ch;\n\t\t\tif(ch=='Y')\n\t\t\t\tf1=1;\n\t\t\tif(ch=='I')\n\t\t\t\tf2=1;\n\t\t}\n\t\tif((f1!=1)&&(f2!=1))\n\t\t\tcout << \"NOT SURE\" << endl;\n\t\telse if((f1!=1)&&(f2==1))\n\t\t\tcout << \"INDIAN\" << endl;\n\t\telse if((f1==1)&&(f2!=1))\n\t\t\tcout << \"NOT INDIAN\" << endl;\n\t\telse if((f1==1)&&(f2==1))\n\t\t\tcout << \"NOT SURE\" << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5590659379959106,
"alphanum_fraction": 0.5700549483299255,
"avg_line_length": 17.225000381469727,
"blob_id": "51b2bedb5790349fa4280489fe5f6c2f1807d435",
"content_id": "fe4078306760173a868fe7b9ca0c53c45c943806",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 728,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 40,
"path": "/coding/codechef/ALPHABET.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\nusing namespace std;\nbool calculate(string letter, string words){\n\t// bool table[256]={false};\n\t// for(int i=0;i<letter.size();i++){\n\t// \tint idx=(int)letter[i];\n\t// \ttable[idx]=true;\n\t// }\n\t// for(int i=0;i<words.size();i++){\n\t// \tint idx=(int)words[i];\n\t// \tif(!table[idx])\n\t// \t\treturn false;\n\t// }\n\t// return true;\n\tfor(int i=0;i<words.size();i++){\n\t\tbool flag=false;\n\t\tfor(int j=0;j<letter.size();j++){\n\t\t\tif(words[i]==letter[j])\n\t\t\t\tflag=true;\n\t\t}\n\t\tif(!flag)\n\t\t\treturn false;\n\t}\n\treturn true;\n}\nint main(){\n\tstring letter,words;\n\tint n;\n\tcin >> letter;\n\tcin >> n;\n\twhile(n--){\n\t\tcin >> words;\n\t\tbool flag=calculate(words,letter);\n\t\tif(flag)\n\t\t\tcout << \"Yes\\n\";\n\t\telse\n\t\t\tcout << \"No\\n\";\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.49373432993888855,
"alphanum_fraction": 0.5012531280517578,
"avg_line_length": 15,
"blob_id": "d9f3d570a6f50b95bf5a6db7d788b92e6d90f19e",
"content_id": "a3a66aad8d26e84da66081fbd3a4541a61752c4a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 399,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 25,
"path": "/coding/codechef/COLOR.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n// #include <vector>\n#include <cstdlib>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t,n,r,g,b,m;\n\tstring color;\n\tcin >> t;\n\twhile(t--){\n\t\tcin >> n;\n\t\tcin >> color;\n\t\tr=g=b=0;\n\t\tfor(int i=0;i<n;i++){\n\t\t\tif(color[i]=='R')\n\t\t\t\tr++;\n\t\t\tif(color[i]=='G')\n\t\t\t\tg++;\n\t\t\tif(color[i]=='B')\n\t\t\t\tb++;\n\t\t\tm = max(max(r,g),b);\n\t\t}\n\t\tcout << (n-m) << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5425631403923035,
"alphanum_fraction": 0.5893357992172241,
"avg_line_length": 28.72222137451172,
"blob_id": "81241e32d8569e0c0a0bedfdee8cb32218f3516d",
"content_id": "43b5397773ba7379b98a2e0b12c975375e7bc61b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1069,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 36,
"path": "/script/working-proxy.py",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "import urllib2\nimport socket\nimport threading\ndef is_bad_proxy(pip): \n try:\n timeout=1\n proxy_handler = urllib2.ProxyHandler({'http': pip})\n opener = urllib2.build_opener(proxy_handler)\n opener.addheaders = [('User-agent', 'Mozilla/5.0')]\n urllib2.install_opener(opener)\n req=urllib2.Request('http://www.google.com') # change the URL to test here\n sock=urllib2.urlopen(req,None,timeout)\n print \"%s is working\" % (pip)\n except urllib2.HTTPError, e:\n print 'Error code: ', e.code\n return e.code\n except Exception, detail:\n print \"ERROR:\", detail\n return True\n return False\n\ndef main():\n socket.setdefaulttimeout(0.1)\n\n # two sample proxy IPs\n #proxyList = ['172.16.117.6:8080', '172.16.115.80:8080']\n\n for i in range(255):\n for j in range(255):\n currentProxy='172.16.'+str(i)+'.'+str(j)\n a=threading.Thread(None,is_bad_proxy,None,currentProxy,None)\n a.start()\n a.join()\n\nif __name__ == '__main__':\n main()"
},
{
"alpha_fraction": 0.43927648663520813,
"alphanum_fraction": 0.5038759708404541,
"avg_line_length": 15.391304016113281,
"blob_id": "5fccc4304fae146cd23994c74f786720d4d70bb3",
"content_id": "050bd49aa888eb96c256a8b253c023b2499cd0ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1548,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 92,
"path": "/coding/geeksforgeeks/sorting/insertionSort.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nvoid print(std::vector<int> A){\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcout << A[i] << \" \";\r\n\t}\r\n}\r\nint insertionSort(std::vector<int> A){\r\n\tfor (int i = 0; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tint j=i-1;\r\n\t\tint tmp=A[i];\r\n\t\twhile(j>=0&&A[j]>tmp){\r\n\t\t\tA[j+1]=A[j];\r\n\t\t\tj--;\r\n\t\t}\r\n\t\tA[j+1]=tmp;\r\n\t\t//print(A);\r\n\t}\r\n\tprint(A);\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tstd::vector<int> A(n);\r\n\tcout << \"enter array\\n\";\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tcin >> A[i];\r\n\t}\r\n\tcout << \"usorted array\\n\";\r\n\tprint(A);\r\n\tcout << \"\\nsorted array\\n\";\r\n\tinsertionSort(A);\r\n\t//print(A);\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nvoid print(std::vector<int> A){\n\tfor (int i = 0; i < A.size(); ++i)\n\t{\n\t\t/* code */\n\t\tcout << A[i] << \" \";\n\t}\n}\nint insertionSort(std::vector<int> A){\n\tfor (int i = 0; i < A.size(); ++i)\n\t{\n\t\t/* code */\n\t\tint j=i-1;\n\t\tint tmp=A[i];\n\t\twhile(j>=0&&A[j]>tmp){\n\t\t\tA[j+1]=A[j];\n\t\t\tj--;\n\t\t}\n\t\tA[j+1]=tmp;\n\t\t//print(A);\n\t}\n\tprint(A);\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tstd::vector<int> A(n);\n\tcout << \"enter array\\n\";\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tcin >> A[i];\n\t}\n\tcout << \"usorted array\\n\";\n\tprint(A);\n\tcout << \"\\nsorted array\\n\";\n\tinsertionSort(A);\n\t//print(A);\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.45823389291763306,
"alphanum_fraction": 0.5083532333374023,
"avg_line_length": 15.15384578704834,
"blob_id": "51048043006bd459c171f69b5431a71f2946852d",
"content_id": "de03c2ccbd9c2f896ed6b1dd15ee2a3a2d5b44d5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 419,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 26,
"path": "/coding/codechef/ICPC16A.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n// #include <vector>\n#include <cstdlib>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t,x1,x2,y1,y2;\n\tcin >> t;\n\twhile(t--){\n\t\tcin >> x1 >> y1 >> x2 >> y2 ;\n\t\tif(y1==y2){\n\t\t\tif(x1<x2)\n\t\t\t\tcout << \"right\\n\";\n\t\t\tif(x2<x1)\n\t\t\t\tcout << \"left\\n\";\n\t\t}\n\t\telse if(x1==x2){\n\t\t\tif(y1<y2)\n\t\t\t\tcout << \"up\\n\";\n\t\t\tif(y2<y1)\n\t\t\t\tcout << \"down\\n\";\n\t\t}\n\t\telse\n\t\t\tcout << \"sad\\n\";\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5384615659713745,
"alphanum_fraction": 0.5466448664665222,
"avg_line_length": 16,
"blob_id": "44ad1f9d607ac381e566620e09ffc9745499a501",
"content_id": "f2b935443dbb5f8b7d3767a46e71b39f890771e7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 611,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 36,
"path": "/coding/codechef/TWONMS.java",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "import java.util.*;\nclass TWONMS {\n\tstatic int max(int a,int b){\n\t\t// return (a<b):b?a;\n\t\tif(a<b)\n\t\t\treturn b;\n\t\treturn a;\n\t}\n\tstatic int min(int a,int b){\n\t\t// return (a<b):a?b;\n\t\tif(a<b)\n\t\t\treturn a;\n\t\treturn b;\n\t}\n\tpublic static void main(String[] args) {\n\t\tint t,a,b,n;\n\t\tScanner read= new Scanner(System.in);\n\t\tt=read.nextInt();\n\t\twhile(t>0){\n\t\t\tt--;\n\t\t\ta=read.nextInt();\n\t\t\tb=read.nextInt();\n\t\t\tn=read.nextInt();\n\t\t\tif(n%2!=0){\n\t\t\t\tint mx=max(a*2,b);\n\t\t\t\tint mn=min(a*2,b);\n\t\t\t\tSystem.out.println(mx/mn);\n\t\t\t}\n\t\t\telse{\n\t\t\t\tint mx=max(a,b);\n\t\t\t\tint mn=min(a,b);\n\t\t\t\tSystem.out.println(mx/mn);\n\t\t\t}\n\t\t}\n\t}\n}"
},
{
"alpha_fraction": 0.4946666657924652,
"alphanum_fraction": 0.5040000081062317,
"avg_line_length": 17.774999618530273,
"blob_id": "ba840623a1b936015b6211c8627371100719d81a",
"content_id": "3a9420078b018be5e55e42e2a733afae05d9f1db",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 750,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 40,
"path": "/coding/codechef/FRGTNLNG.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <vector>\n#include <cstdlib>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t,n,k,num;\n\tstd::vector<std::vector<string> > input;\n\tstd::vector<string> tokens;\n\tcin >> t;\n\twhile(t--){\n\t\tcin >> n >> k;\n\t\ttokens.resize(n);\n\t\tinput.resize(k);\n\t\tfor (int i = 0; i < n; ++i){\n\t\t\tcin >> tokens[i];\n\t\t}\n\t\tfor (int i = 0; i < k; ++i){\n\t\t\tcin >> num ;\n\t\t\tinput[i].resize(num);\n\t\t\tfor (int j = 0; j < num; ++j){\n\t\t\t\tcin >> input[i][j];\n\t\t\t}\n\t\t}\n\t\tfor (int i = 0; i < n; ++i){\n\t\t\tbool flag=false;\n\t\t\tfor(int j=0;j<k;j++){\n\t\t\t\tfor(int l=0;l<input[j].size();l++){\n\t\t\t\t\tif(tokens[i]==input[j][l])\n\t\t\t\t\t\tflag=true;\n\t\t\t\t}\n\t\t\t}\n\t\t\tif(flag)\n\t\t\t\tcout << \"YES \";\n\t\t\telse\n\t\t\t\tcout << \"NO \";\n\t\t}\n\t\tcout << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.39673912525177,
"alphanum_fraction": 0.4311594069004059,
"avg_line_length": 15.757575988769531,
"blob_id": "39b389f3efc82205d54903ec33e4b3e0f8e5ec99",
"content_id": "f8f48a9c54a3ce687f09efc66b94928dfaff26b8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 552,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 33,
"path": "/coding/codechef/project -euler.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nint main(){\n\tint i,j;\n\tfor(i=11;i<100;i++){\n\t\tfor(j=i+1;j<100;j++){\n\t\t\tfloat a=0;\n\t\t\tint w=i%10;\n\t\t\tint x=i/10;\n\t\t\tint y=j%10;\n\t\t\tint z=j/10;\n\t\t\tfloat o=(float)i/(float)j;\n\t\t\tif(w==y){\n\t\t\t\ta=(float)x/(float)z;\n\t\t\t\t//printf(\"%d %d\\n\",w,y );\n\t\t\t}\n\t\t\tif(w==z){\n\t\t\t\ta=(float)x/(float)y;\n\t\t\t\t//printf(\"%d %d\\n\",w,z );\n\t\t\t}\n\t\t\tif(x==y){\n\t\t\t\ta=(float)w/(float)z;\n\t\t\t\t//printf(\"%d %d\\n\",x,y );\n\t\t\t}\n\t\t\tif(x==z){\n\t\t\t\ta=(float)w/(float)y;\n\t\t\t\t//printf(\"%d %d\\n\",x,z );\n\t\t\t}\n\t\t\tif(a==o)\n\t\t\t\tprintf(\"%d %d %f %f \\n\",i,j,a,o );\n\t\t}\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.532760500907898,
"alphanum_fraction": 0.5477980375289917,
"avg_line_length": 15.642857551574707,
"blob_id": "b3477a916a9526a0ec533e8ec0fa4c84f1036fd4",
"content_id": "aabdd20ab9ea02dd558e4b0167e6bbd658da108d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 931,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 56,
"path": "/coding/data structure/hash.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<stdlib.h>\nint direct_hashing(int array[],int n,int key){\n\tint i,*arr;\n\tarr=(int *)malloc(sizeof(int)*(n+1));\n\tfor (i = 0; i < n; ++i)\n\t{\n\t\tarr[i]=0;\n\t}\n\tfor (i = 0; i < n; ++i)\n\t{\n\t\tint num=array[i];\n\t\tarr[num]=1;\n\t}\n\tif(key<=n)\n\t\tif(arr[key]==1)\n\t\t\treturn 1;\n\treturn 0;\n}\nint main()\n{\n\tint ch;\n\tint key;\n\tprintf(\"enter key which is to be searched\\n\");\n\tscanf(\"%d\", &key);\n\tint n,i,*arr;\n\tprintf(\"enter number of intergers\\n\");\n\tscanf(\"%d\", &n);\n\tarr=(int *)malloc(sizeof(int)*(n));\n\tprintf(\"enter intergers\\n\");\n\tfor (i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tscanf(\"%d\", &arr[i]);\n\t}\n\tprintf(\"enter choice to select hashing\\n\");\n\t//fflush(stdin);\n\tprintf(\"1 for direct hashing\\n2 for chaining\\n\");\n\tscanf(\"%d\", &ch);\n\tif (ch==1)\n\t{\n\t\tint r=direct_hashing(arr,n,key);\n\t\tif(r)\n\t\t\tprintf(\"found\\n\");\n\t\telse\n\t\t\tprintf(\"not found\\n\");\n\t\t/* code */\n\t}\n\tif (ch==2)\n\t{\n\t\t//chaining();\n\t\t/* code */\n\t}\n\t\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5027933120727539,
"alphanum_fraction": 0.5307262539863586,
"avg_line_length": 12.84615421295166,
"blob_id": "d6ed9aebc46111b0433f7a1cebc94580d5f21547",
"content_id": "df563c44661d79bd52519fd9835da4428dd41374",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 179,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 13,
"path": "/coding/codechef/trisq.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\nusing namespace std;\nint main(){\n\tint t;\n\tcin >> t;\n\twhile(t--){\n\t\tint b;\n\t\tcin >> b;\n\t\tlong int val=((b/2)*((b/2)-1))/2;\n\t\tcout << val << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.45121949911117554,
"alphanum_fraction": 0.47154471278190613,
"avg_line_length": 13.5,
"blob_id": "67cc3dc68df5908190904e1caebdec471e3b8b44",
"content_id": "3c8f410fc43713d73cf2713d44e4c63b88e7aedf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 492,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 34,
"path": "/coding/codechef/CCOOK.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint n;\n\tcin >> n;\n\twhile(n--){\n\t\tint num,c=0;\n\t\tfor (int i = 0; i < 5; ++i){\n\t\t\t/* code */\n\t\t\tcin >> num;\n\t\t\tif(num)\n\t\t\t\tc++;\n\t\t}\n\t\tif(c==0){\n\t\t\tcout << \"Beginner\\n\";\n\t\t}\n\t\tif(c==1){\n\t\t\tcout << \"Junior Developer\\n\";\n\t\t}\n\t\tif(c==2){\n\t\t\tcout << \"Middle Developer\\n\";\n\t\t}\n\t\tif(c==3){\n\t\t\tcout << \"Senior Developer\\n\";\n\t\t}\n\t\tif(c==4){\n\t\t\tcout << \"Hacker\\n\";\n\t\t}\n\t\tif(c==5){\n\t\t\tcout << \"Jeff Dean\\n\";\n\t\t}\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4362744987010956,
"alphanum_fraction": 0.5117647051811218,
"avg_line_length": 18.173076629638672,
"blob_id": "39058da85695a320b72e08d1536c282a87c66338",
"content_id": "22f23997acd1f313b2285e935f98526ac2ba200f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2040,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 104,
"path": "/coding/interviewbit/DP/uniquePathGrid.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint uniquePath(std::vector<std::vector<int> > A){\r\n\tint row=A.size();\r\n\tint col=A[0].size();\r\n\tstd::vector<std::vector<int> > path(row,std::vector<int>(col,0));\r\n\tpath[0][0]=1;\r\n\tif(A[0][0]==1)\r\n\t\treturn 0;\r\n\tif(row==0)\r\n\t\treturn 0;\r\n\tif(col==0)\r\n\t\treturn 0;\r\n\tfor (int i = 1; i < row; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tif(A[i][0]!=1)\r\n\t\t\tpath[i][0]=path[i-1][0];\r\n\t}\r\n\tfor(int j=1;j<col;j++)\r\n\t\tif(A[0][j]!=1)\r\n\t\t\tpath[0][i]=path[0][i-1];\r\n\tfor(int i=1;i<row;i++){\r\n\t\tfor(int j=1;j<col;j++){\r\n\t\t\tif(A[i][j]!=1)\r\n\t\t\t\tpath[i][j]=path[i-1][j]+path[i][j-1];\r\n\t\t}\r\n\t}\r\n\treturn path[row-1][col-1];\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tstd::vector<std::vector<int> > A(n,std::vector<int>(m));\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tfor (int j = 0; j < m; ++j)\r\n\t\t{\r\n\t\t\t/* code */\r\n\t\t\tcin >> A[i][j];\r\n\t\t}\r\n\t}\r\n\tint sol=uniquePath(A);\r\n\tcout << sol << endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint uniquePath(std::vector<std::vector<int> > A){\n\tint row=A.size();\n\tint col=A[0].size();\n\tstd::vector<std::vector<int> > path(row,std::vector<int>(col,0));\n\tpath[0][0]=1;\n\tif(A[0][0]==1)\n\t\treturn 0;\n\tif(row==0)\n\t\treturn 0;\n\tif(col==0)\n\t\treturn 0;\n\tfor (int i = 1; i < row; ++i)\n\t{\n\t\t/* code */\n\t\tif(A[i][0]!=1)\n\t\t\tpath[i][0]=path[i-1][0];\n\t}\n\tfor(int j=1;j<col;j++)\n\t\tif(A[0][j]!=1)\n\t\t\tpath[0][i]=path[0][i-1];\n\tfor(int i=1;i<row;i++){\n\t\tfor(int j=1;j<col;j++){\n\t\t\tif(A[i][j]!=1)\n\t\t\t\tpath[i][j]=path[i-1][j]+path[i][j-1];\n\t\t}\n\t}\n\treturn path[row-1][col-1];\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tstd::vector<std::vector<int> > 
A(n,std::vector<int>(m));\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\t/* code */\n\t\tfor (int j = 0; j < m; ++j)\n\t\t{\n\t\t\t/* code */\n\t\t\tcin >> A[i][j];\n\t\t}\n\t}\n\tint sol=uniquePath(A);\n\tcout << sol << endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5507246255874634,
"alphanum_fraction": 0.5833333134651184,
"avg_line_length": 12.850000381469727,
"blob_id": "7c0820404edcf4c06fb4caef3478016b6e6e16a3",
"content_id": "afc755fcb0c7326f801d7c219bcf52f618184632",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 276,
"license_type": "no_license",
"max_line_length": 23,
"num_lines": 20,
"path": "/coding/python/n-c-r.py",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "def n_c_r(n,r):\n\tfull=1\n\thalf=1\n\tother=1\n\tfor i in range(2,n+1):\n\t\tfull=full*i\n\t\tif i<=r:\n\t\t\thalf=half*i\n\t\tif i<=(n-r):\n\t\t\tother=other*i\n\t\tif full%half==0:\n\t\t\tfull=full/half\n\t\t\thalf=1\n\t\tif full%other==0:\n\t\t\tfull=full/other\n\t\t\tother=1\n\tprint full\nn=input()\nr=input()\nn_c_r(n,r)"
},
{
"alpha_fraction": 0.46693122386932373,
"alphanum_fraction": 0.5410053133964539,
"avg_line_length": 15.75,
"blob_id": "0d37dc360472e8f02ad7a5c9051f874172b8a013",
"content_id": "a4c2a588af00d9abd14b6381789adb35eaa9c69c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1512,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 88,
"path": "/coding/geeksforgeeks/sorting/heapSort.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint heapify(int* A,int idx,int n){\r\n\tint left=idx*2+1;\r\n\tint right=idx*2+2;\r\n\tint large=idx;\r\n\tif(left<n&&A[left]>A[idx])\r\n\t\tlarge=left;\r\n\tif(right<n&&A[right]>A[large])\r\n\t\tlarge=right;\r\n\tif(large!=idx){\r\n\t\tswap(A[idx],A[large]);\r\n\t\theapify(A,large,n);\r\n\t}\r\n\r\n}\r\nint heapSort(int* A,int n){\r\n\tfor(int i=n/2-1;i>=0;i--)\r\n\t\theapify(A,i,n);\r\n\tfor(int i=n-1;i>=0;i--){\r\n\t\tswap(A[0],A[i]);\r\n\t\theapify(A,0,i);\r\n\t}\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\tcout << A[i] << ' ';\r\n\t}\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tint A[n];\r\n\tfor (int i = 0; i < n; ++i)\r\n\t{\r\n\t\tcin >> A[i];\r\n\t}\r\n\theapSort(A,n);\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint heapify(int* A,int idx,int n){\n\tint left=idx*2+1;\n\tint right=idx*2+2;\n\tint large=idx;\n\tif(left<n&&A[left]>A[idx])\n\t\tlarge=left;\n\tif(right<n&&A[right]>A[large])\n\t\tlarge=right;\n\tif(large!=idx){\n\t\tswap(A[idx],A[large]);\n\t\theapify(A,large,n);\n\t}\n\n}\nint heapSort(int* A,int n){\n\tfor(int i=n/2-1;i>=0;i--)\n\t\theapify(A,i,n);\n\tfor(int i=n-1;i>=0;i--){\n\t\tswap(A[0],A[i]);\n\t\theapify(A,0,i);\n\t}\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\tcout << A[i] << ' ';\n\t}\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tint A[n];\n\tfor (int i = 0; i < n; ++i)\n\t{\n\t\tcin >> A[i];\n\t}\n\theapSort(A,n);\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5793103575706482,
"alphanum_fraction": 0.5862069129943848,
"avg_line_length": 18.33333396911621,
"blob_id": "b91a5d0c6df16480ce33ad015a7ac1562b78d0e6",
"content_id": "7a2a3d96186ec464bc61680d174834074436f50f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 870,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 45,
"path": "/coding/interviewbit/array/primeSum.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<bits/stdc++.h>\nusing namespace std;\nclass primeSum {\nprivate:\n /* data */\n int number;\n bool isPrime(int number);\npublic:\n primeSum (int num){\n number=num;\n };\n //virtual ~primeSum ();\n std::vector<int> generate();\n};\nbool primeSum::isPrime(int number){\n for (size_t i = 2; i < number; i++) {\n /* code */\n if(!number%i)\n return false;\n }\n return true;\n}\nstd::vector<int> primeSum::generate(){\n int first=2;\n int last=number-2;\n std::vector<int> result;\n while (first<=last) {\n /* code */\n if(isPrime(first)&&isPrime(last)){\n result.push_back(first);\n result.push_back(last);\n return result;\n }\n first++;\n last=number-first;\n }\n}\nint main(){\n int n;\n cin >> n;\n primeSum getNumbers(n);\n std::vector<int> result=getNumbers.generate();\n cout << result[0] << \" \"<<result[1] << endl;\n return 0;\n}\n"
},
{
"alpha_fraction": 0.45182013511657715,
"alphanum_fraction": 0.5695931315422058,
"avg_line_length": 16.576923370361328,
"blob_id": "c54e2575d81701f0b8ae2f37551f3b33f068e155",
"content_id": "7336a8969aa67be13d6ed14d856348f12d2d488c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 934,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 52,
"path": "/coding/interviewbit/DP/stairs.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint stair(int n){\r\n\tvector<int> table(n+1);\r\n if(n==0)\r\n return 0;\r\n if(n==1)\r\n return 1;\r\n table[0]=1;\r\n table[1]=1;\r\n for(int i=2;i<=n;i++)\r\n table[i]=table[i-1]+table[i-2];\r\n return table[n];\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tint sol=stair(n);\r\n\tcout << sol <<endl;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint stair(int n){\n\tvector<int> table(n+1);\n if(n==0)\n return 0;\n if(n==1)\n return 1;\n table[0]=1;\n table[1]=1;\n for(int i=2;i<=n;i++)\n table[i]=table[i-1]+table[i-2];\n return table[n];\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tint sol=stair(n);\n\tcout << sol <<endl;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.48463356494903564,
"alphanum_fraction": 0.49881798028945923,
"avg_line_length": 14.703703880310059,
"blob_id": "27a8e47307ed9d6f1a6015875512ba59999b4023",
"content_id": "415f8b6e8ad938b1ce47b14cffa6a1b304bb8e71",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 423,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 27,
"path": "/coding/codechef/BRACKETS.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <vector>\nusing namespace std;\nint main(int argc, char const *argv[]){\n\tint t,n,bal;\n\tstring br;\n\tcin >> t;\n\twhile(t--){\n\t\tcin >> br;\n\t\tint mx=-1;\n\t\tbal=0;\n\t\tint len=br.size();\n\t\tfor(int i=0;i<len;i++){\n\t\t\tif(br[i]=='(')\n\t\t\t\tbal++;\n\t\t\tif(br[i]==')')\n\t\t\t\tbal--;\n\t\t\tmx=max(mx,bal);\n\t\t}\n\t\tfor(int i=0;i<mx;i++)\n\t\t\tcout << \"(\";\n\t\tfor(int i=0;i<mx;i++)\n\t\t\tcout << \")\";\n\t\tcout << endl;\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.42490118741989136,
"alphanum_fraction": 0.4644268751144409,
"avg_line_length": 16.482759475708008,
"blob_id": "1c1f853d15bfdcf2ee88005be7ed6136065703d3",
"content_id": "c24f099a599face9530a2d2b6b72b0e123087bc8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 506,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 29,
"path": "/coding/codechef/triangle-sum.c",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<stdlib.h>\nint main(){\n\tint t,i,j;\n\tscanf(\"%d\", &t);\n\twhile(t--){\n\t\tint n;\n\t\tscanf(\"%d\", &n);\n\t\tint *arr[n];\n\t\tfor(i=0;i<n;i++){\n\t\t\tarr[i]=(int *)malloc(sizeof(int)*(i+1));\n\t\t}\n\t\tfor(i=0;i<n;i++){\n\t\t\tfor(j=0;j<=i;j++){\n\t\t\t\tscanf(\"%d\", &arr[i][j]);\n\t\t\t}\n\t\t}\n\t\tfor(i=n-1;i>0;i--){\n\t\t\tfor(j=0;j<i;j++){\n\t\t\t\tint sum1=arr[i][j]+arr[i-1][j];\n\t\t\t\tint sum2=arr[i][j+1]+arr[i-1][j];\n\t\t\t\tint max=sum1>sum2?sum1:sum2;\n\t\t\t\tarr[i-1][j]=max;\n\t\t\t}\n\t\t}\n\t\tprintf(\"%d\\n\",arr[0][0]);\n\t}\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.38255032896995544,
"alphanum_fraction": 0.6644295454025269,
"avg_line_length": 16.294116973876953,
"blob_id": "01c1875a44bf6cb2cbd897f06eea8cf9569ac6e6",
"content_id": "3e971829d5efc9267d1466c3ecc971bff30bb86f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 298,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 17,
"path": "/coding/geeksforgeeks/desktop.ini",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n[ViewState]\r\nMode=\r\nVid=\r\nFolderType=Generic\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n[ViewState]\nMode=\nVid=\nFolderType=Generic\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n"
},
{
"alpha_fraction": 0.49433961510658264,
"alphanum_fraction": 0.5830188393592834,
"avg_line_length": 17.535715103149414,
"blob_id": "99d5db876b2ada3b6b70a7fe872109a6b11a665f",
"content_id": "805189b930d8a2c01ddcf82a145c4a5db4cd8976",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1060,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 56,
"path": "/coding/interviewbit/DP/maxProfit.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nint maxProfit(std::vector<int> A){\r\n\tint small=A[0];\r\n\tint profit=0;\r\n\tfor (int i = 1; i < A.size(); ++i)\r\n\t{\r\n\t\t/* code */\r\n\t\tsmall=min(small,A[i]);\r\n\t\tprofit=max(profit,A[i]-small);\r\n\t}\r\n\treturn profit;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\r\n\tvector<int> A(n);\r\n\t\tfor(int i=0;i<n;i++)\r\n\t\t\tcin >> A[i];\r\n\tint profit=maxProfit(A);\r\n\tcout << profit <<endl;\t\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nint maxProfit(std::vector<int> A){\n\tint small=A[0];\n\tint profit=0;\n\tfor (int i = 1; i < A.size(); ++i)\n\t{\n\t\t/* code */\n\t\tsmall=min(small,A[i]);\n\t\tprofit=max(profit,A[i]-small);\n\t}\n\treturn profit;\n}\nint main(){\n\tint n;\n\tcin >> n;\n\tvector<int> A(n);\n\t\tfor(int i=0;i<n;i++)\n\t\t\tcin >> A[i];\n\tint profit=maxProfit(A);\n\tcout << profit <<endl;\t\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.5185185074806213,
"alphanum_fraction": 0.5275275111198425,
"avg_line_length": 13.925373077392578,
"blob_id": "2fdea973aead3cacce0a3affc582a976b0aa0165",
"content_id": "92c81bedf72edbbf2ad938ee30a73d1c923d388e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 999,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 67,
"path": "/coding/codechef/MOVIEWKN.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "#include<iostream>\n#include <vector>\nusing namespace std;\nclass MOVIEWKN\n{\n\tint size;\n\tstd::vector<int> L;\n\tstd::vector<int> R;\npublic:\n\tMOVIEWKN(){};\n\t// ~MOVIEWKN();\n\tvoid getData();\n\tint calculate();\n};\nvoid MOVIEWKN::getData(){\n\tcin >> size;\n\tL.resize(size);\n\tR.resize(size);\n\tfor (int i = 0; i < size; ++i){\n\t\tcin >> L[i];\n\t}\n\t// cout << size <<endl;\n\tfor (int i = 0; i < size; ++i){\n\t\tcin >> R[i];\n\t}\n}\nint MOVIEWKN::calculate(){\n\t// std::vector<int> m;\n\tint m=0,index=-1,prod,Rmax=0,Lmax=0;\n\tfor (int i = 0; i < size; ++i){\n\t\tprod= L[i]*R[i];\n\t\tif(prod>m){\n\t\t\tm=prod;\n\t\t\tindex=(i);\n\t\t\tRmax=R[i];\n\t\t\tLmax=L[i];\n\t\t}\n\t\tif(prod==m){\n\t\t\tif(R[i]>Rmax){\n\t\t\t\tm=prod;\n\t\t\t\tindex=(i);\n\t\t\t\tRmax=R[i];\n\t\t\t\tLmax=L[i];\t\n\t\t\t}\n\t\t\telse if(index>i){\n\t\t\t\tm=prod;\n\t\t\t\tindex=(i);\n\t\t\t\tRmax=R[i];\n\t\t\t\tLmax=L[i];\n\t\t\t}\n\t\t}\n\t}\n\treturn (index+1);\n}\nint main()\n{\n\tint t;\n\tcin >> t;\n\tMOVIEWKN object;//= new MOVIEWKN();\n\twhile(t--){\n\t\tobject.getData();\n\t\tint res=object.calculate();\n\t\tcout << res << endl;\n\t}\n\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.49367785453796387,
"alphanum_fraction": 0.5552501082420349,
"avg_line_length": 18.77777862548828,
"blob_id": "fa3e6b77c1494b9de9dfc20cc044925c1bac2a7a",
"content_id": "82910efc1fd28055aa0ab718391b63765877b569",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1819,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 90,
"path": "/coding/interviewbit/stack & queue/RainWater.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\nvoid print(vector<int> A){\r\n\tfor(int i=0;i<A.size();i++)\r\n\t\tcout << A[i] <<\" \";\r\n\tcout << endl;\r\n}\r\nint max(int a,int b){\r\n\treturn ((a>b)?a:b);\r\n}\r\nint min(int a,int b){\r\n\treturn ((a<b)?a:b);\r\n}\r\nint evalute(vector<int> A){\r\n\tint sum=0;\r\n\tvector<int> left(A.size());\r\n\tvector<int> right(A.size());\r\n\tleft[0]=A[0];\r\n\tright[A.size()-1]=A[A.size()-1];\r\n\tfor(int i=1;i<A.size();i++)\r\n\t\tleft[i]=max(left[i-1],A[i]);\r\n\tfor(int i=A.size()-2;i>=0;i--)\r\n\t\tright[i]=max(right[i+1],A[i]);\r\n\tfor(int i=0;i<A.size();i++)\r\n\t\tsum=sum+min(left[i],right[i])-A[i];\r\n\t//print(A);\r\n\t//print(left);\r\n\t//print(right);\r\n\treturn sum;\r\n}\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\tvector<int> A(n);\r\n\tfor(int i=0;i<n;i++)\r\n\t\tcin >> A[i];\r\n\tint res=evalute(A);\r\n\tcout << res;\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\nvoid print(vector<int> A){\n\tfor(int i=0;i<A.size();i++)\n\t\tcout << A[i] <<\" \";\n\tcout << endl;\n}\nint max(int a,int b){\n\treturn ((a>b)?a:b);\n}\nint min(int a,int b){\n\treturn ((a<b)?a:b);\n}\nint evalute(vector<int> A){\n\tint sum=0;\n\tvector<int> left(A.size());\n\tvector<int> right(A.size());\n\tleft[0]=A[0];\n\tright[A.size()-1]=A[A.size()-1];\n\tfor(int i=1;i<A.size();i++)\n\t\tleft[i]=max(left[i-1],A[i]);\n\tfor(int i=A.size()-2;i>=0;i--)\n\t\tright[i]=max(right[i+1],A[i]);\n\tfor(int i=0;i<A.size();i++)\n\t\tsum=sum+min(left[i],right[i])-A[i];\n\t//print(A);\n\t//print(left);\n\t//print(right);\n\treturn sum;\n}\nint main(){\n\tint n;\n\tcin >> n;\t\n\tvector<int> A(n);\n\tfor(int i=0;i<n;i++)\n\t\tcin >> A[i];\n\tint res=evalute(A);\n\tcout << res;\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 
5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.48613375425338745,
"alphanum_fraction": 0.626427412033081,
"avg_line_length": 11.956521987915039,
"blob_id": "53a845ade711d7d382548d5dcb126e1fcdb2510c",
"content_id": "dcd1785aa49bfc835711965c0b0a707c4371d2cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 613,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 46,
"path": "/coding/geeksforgeeks/mirror.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n<<<<<<< HEAD\n#include<iostream>\r\n#include<bits/stdc++.h>\r\nusing namespace std;\r\n\r\nstruct Node\r\n{\r\n\tint key;\r\n\tstruct Node *left,*right;\r\n};\r\n\r\ntypedef struct Node node;\r\n\r\n\r\n\r\nint main(){\r\n\tint n;\r\n\tcin >> n;\t\r\n\treturn 0;\r\n=======\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n#include<iostream>\n#include<bits/stdc++.h>\nusing namespace std;\n\nstruct Node\n{\n\tint key;\n\tstruct Node *left,*right;\n};\n\ntypedef struct Node node;\n\n\n\nint main(){\n\tint n;\n\tcin >> n;\t\n\treturn 0;\n<<<<<<< HEAD\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n=======\n>>>>>>> 5d151628b32a2ef5b633f055e6961a6c0d18654b\n}"
},
{
"alpha_fraction": 0.4299212694168091,
"alphanum_fraction": 0.4299212694168091,
"avg_line_length": 22.090909957885742,
"blob_id": "ab15d782b8ce6d3d06ae5e481109a2c51096110e",
"content_id": "8aa826973c49f6f5216cbd856364430d5eaead38",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1270,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 55,
"path": "/coding/interviewbit/linkedList/mergeTwoSortedList.cpp",
"repo_name": "vishalgupta84/Programmes",
"src_encoding": "UTF-8",
"text": "/**\n * Definition for singly-linked list.\n * struct ListNode {\n * int val;\n * ListNode *next;\n * ListNode(int x) : val(x), next(NULL) {}\n * };\n */\nListNode* Solution::mergeTwoLists(ListNode* A, ListNode* B) {\n ListNode *result = nullptr, *tip = nullptr;\n while(A&&B){\n ListNode * newNode;\n if(A->val < B-> val){\n newNode = new ListNode(A->val);\n A = A->next;\n \n } else{\n newNode = new ListNode(B->val);\n B = B->next;\n }\n if(!result){\n newNode->next = result;\n result = newNode;\n tip = result;\n }\n else{\n tip->next = newNode;\n }\n }\n while(A){\n ListNode *newNode = new ListNode(A->val);\n A = A->next;\n if(!result){\n newNode->next = result;\n result = newNode;\n tip = result;\n }\n else{\n tip->next = newNode;\n }\n }\n while(B){\n ListNode *newNode = new ListNode(B->val);\n B = B->next;\n if(!result){\n newNode->next = result;\n result = newNode;\n tip = result;\n }\n else{\n tip->next = newNode;\n }\n }\n return result;\n}\n"
}
] | 92 |
lvzb/MonitorServer | https://github.com/lvzb/MonitorServer | 5e50f87d98217e44a59dc364b76761ba5414e7b8 | b804f626517ebb8c5c7a17adf3720ea89d4b6916 | 50fb548cb4e9c23552ec3ac1ce58bf5d81bc5d19 | refs/heads/master | 2020-08-15T01:58:42.079999 | 2019-12-05T02:03:00 | 2019-12-05T02:03:00 | 215,263,973 | 2 | 1 | null | null | null | null | null | [
{
"alpha_fraction": 0.8226950168609619,
"alphanum_fraction": 0.8368794322013855,
"avg_line_length": 19.14285659790039,
"blob_id": "5da9e59663fa82c42e8ee0d67c0c70984af14137",
"content_id": "b8eca4a15535ab8c015feca3420b4968093d20fe",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 235,
"license_type": "permissive",
"max_line_length": 40,
"num_lines": 7,
"path": "/README.md",
"repo_name": "lvzb/MonitorServer",
"src_encoding": "UTF-8",
"text": "# MonitorServer\n监控主机资源,包括CPU,内存,硬盘大小及数据库定时任务\n\n# python\npip3 install pymongo \npip3 install psutil \n在linux主机中使用conrtab做定时任务,定时执行python脚本收集信息\n"
},
{
"alpha_fraction": 0.5199999809265137,
"alphanum_fraction": 0.5365333557128906,
"avg_line_length": 32.12727355957031,
"blob_id": "e68aef76d2187f8ec06ff6c21fec6101a59ff8a6",
"content_id": "d2690622d84cc51b00a92d3f7143038d46d9fb1b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1875,
"license_type": "permissive",
"max_line_length": 149,
"num_lines": 55,
"path": "/monitor.py",
"repo_name": "lvzb/MonitorServer",
"src_encoding": "UTF-8",
"text": "# -*- coding=utf-8 -*-\r\nimport psutil\r\nimport json\r\nimport pymongo\r\nimport datetime\r\n\r\nclass Host():\r\n def __init__(self):\r\n self.partition = []\r\n self.proccess = []\r\n self.name = \"127.0.0.1\"\r\n\r\n def disk(self):\r\n part = psutil.disk_partitions()\r\n for item in part:\r\n diskUsage = psutil.disk_usage(item.mountpoint)\r\n disk_dic = {\"mountpoint\":item.mountpoint,\"total\":diskUsage.total,\"free\":diskUsage.free,\"percent\":diskUsage.percent,\"used\":diskUsage.used}\r\n self.partition.append(disk_dic)\r\n\r\n def nginxProcess(self):\r\n for proc in psutil.process_iter():\r\n if proc.name()=='nginx':\r\n try:\r\n pinfo = proc.as_dict(attrs=['pid', 'name','exe','cmdline'])\r\n except Exception, e:\r\n pass\r\n else:\r\n self.proccess.append(pinfo)\r\n\r\n\r\n def javaProcess(self):\r\n for proc in psutil.process_iter():\r\n if proc.name()=='java':\r\n try:\r\n pinfo = proc.as_dict(attrs=['pid', 'name','exe','cmdline'])\r\n except Exception, e:\r\n pass\r\n else:\r\n self.proccess.append(pinfo)\r\n\r\n\r\nif __name__ == '__main__':\r\n host = Host()\r\n host.name = \"192.168.1.20\";\r\n host.disk()\r\n host.javaProcess()\r\n host.nginxProcess()\r\n memory = psutil.virtual_memory()\r\n mem_dic = {\"total\":memory.total,\"used\":memory.used,\"free\":memory.free,\"percent\":memory.percent}\r\n conn = pymongo.MongoClient('127.0.0.1',27017)\r\n db_auth = conn.admin\r\n db_auth.authenticate(\"root\", \"mytest123\")\r\n db = conn.ossdb\r\n data = {\"name\":host.name,\"cpu\":psutil.cpu_percent(0),\"mem\":mem_dic,\"disk\":host.partition,\"process\":host.proccess,\"date\": datetime.datetime.now()}\r\n db.host.save(data)"
}
] | 2 |
zain08816/MAXX | https://github.com/zain08816/MAXX | 08af523dbcd6b71ffa781f993d1c0f03a147479b | 82b5473a9800b05524fd843d0e7c6c225416001a | a305d112e4fef1e8ac7d3c73ea0c777fe8ea7aab | refs/heads/master | 2021-06-24T23:58:53.538748 | 2021-03-20T04:06:59 | 2021-03-20T04:06:59 | 206,901,583 | 2 | 2 | null | 2019-09-07T02:01:54 | 2020-08-29T22:46:16 | 2021-03-20T04:06:59 | Python | [
{
"alpha_fraction": 0.6520069241523743,
"alphanum_fraction": 0.6731323003768921,
"avg_line_length": 30.325302124023438,
"blob_id": "b509274e7b2e3c8e2e88e70f120e56f68f165be2",
"content_id": "1d8de83c5a03d3c3a0eecc8508e839d9badb412d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5207,
"license_type": "no_license",
"max_line_length": 150,
"num_lines": 166,
"path": "/analyze.py",
"repo_name": "zain08816/MAXX",
"src_encoding": "UTF-8",
"text": "import statistics\nimport pandas as pd\n\n#bokeh\nfrom bokeh.io import output_file, show\nfrom bokeh.layouts import row, gridplot, grid, column, layout\nfrom bokeh.plotting import figure\nfrom bokeh.models import ColumnDataSource\n\n# Imports the Google Cloud client library\nfrom google.cloud import language\nfrom google.cloud.language import enums\nfrom google.cloud.language import types\n\n# Instantiates a client\nclient = language.LanguageServiceClient()\n\n\ntweets = []\nf = open(\"tweets.txt\", \"r\")\nname = ''\nfor x in f:\n x = x.split(' ')\n date = ' '.join(x[1:3])\n tweet = ' '.join(x[5:])\n name = x[4]\n tweets.append([date, tweet])\n\n# formatting name\nname = name[1:len(name)-1]\nname = '@'+name\n\nprint(name)\n# print(tweets) \n \n \n #= x[x.find('>')+2:len(x)-1]\n # date = x[20:39]\n # tweets.append([date,tweet.lower()])\n\n# tweets_and_dates = zip(dates, tweets)\n\nsentimentscore = []\nmagnitudescore = []\n\n# The text to analyze\nfor text in tweets:\n text = text[1]\n document = types.Document(\n content=text,\n type=enums.Document.Type.PLAIN_TEXT)\n\n # Detects the sentiment of the text\n sentiment = client.analyze_sentiment(document=document).document_sentiment\n print()\n print('Text: {}'.format(text))\n print('Sentiment: {}, {}'.format(sentiment.score, sentiment.magnitude))\n sentimentscore.append(sentiment.score)\n magnitudescore.append(sentiment.magnitude)\n\n# print(statistics.mean(sentimentscore))\n# print(statistics.mean(magnitudescore))\n\n\n#create the weitghted sentiment scores\nweighted_sentiment = [sentimentscore[i]*magnitudescore[i] for i in range(len(magnitudescore))]\ndates = [data[0] for data in tweets]\n\n# print(dates)\n# print(weighted_sentiment)\n\nfor i, date in enumerate(dates):\n dates[i] = pd.to_datetime(date, format = \"%Y-%m-%d %H:%M:%S\")\n\n# print(dates)\nx = dates\ny = weighted_sentiment\n\n\n#remove 0 sentiment items (0 sentiment usually occurs when tweet is a link or image)\nfor i, sent in 
enumerate(weighted_sentiment):\n if sent == 0.0:\n y.pop(i)\n x.pop(i)\nprint(y)\n\n#find average sentiment\naverage = statistics.mean(y)\nhalf = abs(average/2)\n\nprint(average)\n\n#get outliers\npositive_outliers_y = []\npositive_outliers_x = []\nnegative_outliers_y = []\nnegative_outliers_x = []\nfor i, sent in enumerate(y):\n if sent > average+half:\n positive_outliers_y.append(sent)\n positive_outliers_x.append(x[i])\n if sent < average-half:\n negative_outliers_y.append(sent)\n negative_outliers_x.append(x[i])\n\n# output to static HTML file\noutput_file(\"lines.html\")\n\n# create a new plot with a title and axis labels\n\ntop_title = \"Weighted Sentiment Over Time For User: {}\".format(name)\np = figure(plot_width = 1200, plot_height = 400, title=top_title, x_axis_label='Date-Time', y_axis_label='Weighted Sentiment', x_axis_type=\"datetime\")\n\n#plot average range\np.line(x = [min(x),max(x)], y = [average+half, average+half], line_width = 1)\np.line(x = [min(x),max(x)], y = [average, average], legend = 'Average Weighted Sentiment Line', line_width = 2, color = 'black')\np.line(x = [min(x),max(x)], y = [average-half, average-half], line_width = 1)\n\n#plot graph circles\np.circle(x, y, legend=\"Tweet Sentiment\", size = 7, color=\"purple\")\np.circle(positive_outliers_x, positive_outliers_y, size = 10, color=\"green\")\np.circle(negative_outliers_x, negative_outliers_y, size = 10, color=\"red\")\np.line(x, y, line_width = 1, color = \"pink\")\n\n#change total sentiment colors\ntotal_sentiment = sum(y)\nif total_sentiment > average:\n sentiment_bar_color = \"green\"\nelif total_sentiment < average:\n sentiment_bar_color = \"red\"\nelse:\n sentiment_bar_color = \"grey\"\n\nif average > 0:\n average_bar_color = \"green\"\nelif average == 0:\n average_bar_color = \"grey\"\nelse:\n average_bar_color = 'red'\n\n\ntotal = figure(plot_width = 200, plot_height = 400, title = \"Total User Sentiment\")\ntotal.vbar(x = [0], width = 0.25, bottom = 0, top = 
[total_sentiment], color = sentiment_bar_color)\n\naverage_weight = figure(plot_width = 200, plot_height = 400, title = \"Average Weighted Sentiment\")\naverage_weight.vbar(x = [0], width = 0.25, bottom = 0, top = [average], color = average_bar_color)\n\nmin_max = figure(plot_width = 200, plot_height = 400, title = \"Highest vs Lowest Weighted Sentiment\")\nmin_max.vbar(x = [0], width = 0.25, bottom = 0, top = [max(y)], color = 'green')\nmin_max.vbar(x = [0.3], width = 0.25, bottom = 0, top = [min(y)], color = 'red')\n\ncompare = figure(plot_width = 200, plot_height = 200, title = \"Negative Outliers vs Positive Outliers\")\ncompare.vbar(x = [0], width = 0.25, bottom = 0, top = [len(negative_outliers_y)], color = 'red')\ncompare.vbar(x = [0.3], width = 0.25, bottom = 0, top = [len(positive_outliers_y)], color = 'green')\n\naverage = figure(plot_width = 200, plot_height = 200, title = \"Average Sentiment and Average Magnitude\")\naverage.vbar(x = [0], width = 0.25, bottom = 0, top = [statistics.mean(sentimentscore)], color = 'blue')\naverage.vbar(x = [0.3], width = 0.25, bottom = 0, top = [statistics.mean(magnitudescore)], color = 'purple')\n\n\n#show graphs\nl = layout([\n [p],\n [total, min_max, compare, average, average_weight]\n], sizing_mode = 'stretch_both')\nshow(l)\n\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.7111111283302307,
"alphanum_fraction": 0.7142857313156128,
"avg_line_length": 27.454545974731445,
"blob_id": "771e14539aecd51663a07416bdd06161de36d5a5",
"content_id": "6ce9e64e2f1009e0e8506bdaffe65a49a346a280",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 630,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 22,
"path": "/twitter.py",
"repo_name": "zain08816/MAXX",
"src_encoding": "UTF-8",
"text": "\n# coding: utf-8\n\n# In[3]:\n\n\nimport twitter \n# Go to http://dev.twitter.com/apps/new to create an app and get values \n# for these credentials, which you'll need to provide in place of these \n# empty string values that are defined as placeholders. \n# See https://developer.twitter.com/en/docs/basics/authentication/overview/oauth \n\n\n# In[ ]:\n\n\n# for more information on Twitter's OAuth implementation. \nCONSUMER_KEY = \nCONSUMER_SECRET = \nOAUTH_TOKEN = \nOAUTH_TOKEN_SECRET = '' \nauth = twitter . oauth . OAuth ( OAUTH_TOKEN , OAUTH_TOKEN_SECRET , CONSUMER_KEY , CONSUMER_SECRET ) \ntwitter_api = twitter . Twitter ( auth = auth ) \n\n\n"
},
{
"alpha_fraction": 0.4285714328289032,
"alphanum_fraction": 0.668571412563324,
"avg_line_length": 13.583333015441895,
"blob_id": "a5bdd5b0fdeedde585dab2ad147ec8c2c23adc90",
"content_id": "2fdd5a14cb732be676d03653e10bbd75f5b7e4c4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 175,
"license_type": "no_license",
"max_line_length": 19,
"num_lines": 12,
"path": "/requirements.txt",
"repo_name": "zain08816/MAXX",
"src_encoding": "UTF-8",
"text": "aiofiles==0.4.0\nblinker==1.4\nClick==7.0\nenum34==1.1.6\nh11==0.7.0\nh2==3.1.1\nhpack==3.0.0\nhyperframe==5.2.0\nitsdangerous==1.1.0\nJinja2==2.11.3\nMarkupSafe==1.1.1\nwsproto==0.14.1\n"
},
{
"alpha_fraction": 0.6341463327407837,
"alphanum_fraction": 0.6378986835479736,
"avg_line_length": 20.360000610351562,
"blob_id": "95f023dbff6c7d8a181a4ae70c28c40c0b8fcb25",
"content_id": "dbe2530c612d996427595ef126c8ea41a6eb44ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 533,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 25,
"path": "/MAXX/app.py",
"repo_name": "zain08816/MAXX",
"src_encoding": "UTF-8",
"text": "from quart import Quart, escape, request, render_template\nimport twint\nimport asyncio\n\napp = Quart(__name__)\nloop = asyncio.get_event_loop()\ntweets = []\n\[email protected]('/')\nasync def hello():\n #asyncio.ensure_future(blocking_function())\n return await render_template('home.html')\n\n# async def blocking_function():\n# c = twint.Config()\n\n# c.Username = \"hasanthehun\"\n# c.Limit = 20\n# c.Store_object = True\n# c.Store_object_tweets_list = tweets\n# twint.run.Search(c)\n\n\nif __name__ == '__main__':\n app.run()"
},
{
"alpha_fraction": 0.7371134161949158,
"alphanum_fraction": 0.7371134161949158,
"avg_line_length": 31.33333396911621,
"blob_id": "70e09e524b5cef3699f78878df05605fbb49345d",
"content_id": "ba81a52692954068fb9628ba4d2168a3f55ede86",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 972,
"license_type": "no_license",
"max_line_length": 246,
"num_lines": 30,
"path": "/README.md",
"repo_name": "zain08816/MAXX",
"src_encoding": "UTF-8",
"text": "## MAXX\n\n Monitored\n Analysis for \n eXtreme \n eXpressions\n \n *Twitter API Key in twitter.py is deauthorized, replace with your own key.*\n \n An automated web-scraper which analyzes digital expression (i.e. Twitter posts) for linguistic patterns which have been statistically related to extreme states of mind, such as mental illness or risk of harming one’s self and/or others.\n \n Input:\n Twitter account\n \n Output:\n Risk assessment of given account\n \n Algorithm:\n Monitors for and identifies certain linguistic factors which may be cause for concern\n \n Activates an alert if:\n a certain amount of risk factors are identified, and/or\n many risk factors are used in combination with each other\n \n Future plans:\n Generalize for use with platforms other than Twitter (Facebook, Reddit, Instagram)\n Refine the analysis algorithm\n Base the algorithm purely on hard research\n\nDevpost: https://devpost.com/software/maxx\n"
},
{
"alpha_fraction": 0.6650000214576721,
"alphanum_fraction": 0.6800000071525574,
"avg_line_length": 13.285714149475098,
"blob_id": "826254592380248abd9e254f3a7fcf658a92a8d5",
"content_id": "39617532dd196e69f473fe65655fd1d66210f657",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 200,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 14,
"path": "/scraper.py",
"repo_name": "zain08816/MAXX",
"src_encoding": "UTF-8",
"text": "import twint\nimport sys\nimport os\n\nc = twint.Config()\n\nc.Username = sys.argv[1]\nc.Limit = 20\nc.Output = \"tweets.txt\"\n\nif os.path.exists(\"tweets.txt\"):\n os.remove(\"tweets.txt\")\n\ntwint.run.Search(c)\n"
}
] | 6 |
mehyedes/stripe-slack-notifier | https://github.com/mehyedes/stripe-slack-notifier | 1cba771c1bab086fd7e396dd8365e7675e08a806 | ffb3767649f875128258cf6b4e60d14aa4c6f207 | fc68ad6eadd9afde0842db5ce72ff00a7080eeb8 | refs/heads/master | 2021-03-27T04:41:39.625784 | 2020-04-04T15:30:54 | 2020-04-04T15:30:54 | 247,787,458 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7504714131355286,
"alphanum_fraction": 0.7510999441146851,
"avg_line_length": 38.775001525878906,
"blob_id": "4e6a7061299f5b5638895091e6e04dd2c841c17f",
"content_id": "034f89e22eafa1b451c9e41f1ff69b6cecad089e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1591,
"license_type": "no_license",
"max_line_length": 253,
"num_lines": 40,
"path": "/README.md",
"repo_name": "mehyedes/stripe-slack-notifier",
"src_encoding": "UTF-8",
"text": "# stripe-slack-notifier\n\n\n\n`stripe-slack-notifier` is an [OpenFaaS](https://www.openfaas.com/) function that can be used as a webhook for sending Slack notifications when triggered by [Stripe API](https://stripe.com/) events( only `charge.succeeded` events are supported for now).\n\nThis function was created to showcase a real-world use case that involves [faasd](https://github.com/openfaas/faasd) and [inlets](https://github.com/inlets/inlets).\n\nMore details can found on the blog post on [myedes.io](https://myedes.io/stripe-serverless-webhook-faasd/).\n\n## Deployment to OpenFaaS\n\nClone the repository:\n```bash\n$ git clone https://github.com/mehyedes/stripe-slack-notifier.git\n$ cd stripe-slack-notifier/\n```\nFetch the `python3-http` template:\n```bash\n$ faas-cli template pull stack -f stripe-slack-notifier.yml\n```\nCreate the necessary secrets for the function:\n```bash\n$ export ${OPENFAAS_GATEWAY_URL}\n$ faas-cli secret create slack-webhook-url \\\n --from-literal=${SLACK_WEBHOOK_URL} --gateway ${OPENFAAS_GATEWAY_URL}\n$ faas-cli secret create stripe-secret-key \\\n --from-literal=${STRIPE_API_KEY} --gateway ${OPENFAAS_GATEWAY_URL}\n$ faas-cli secret create webhook-secret \\\n --from-literal=${WEBHOOK_SIGNING_SECRET} --gateway ${OPENFAAS_GATEWAY_URL}\n$ faas-cli secret list --gateway ${OPENFAAS_GATEWAY_URL}\nNAME\nslack-webhook-url\nstripe-secret-key\nwebhook-secret\n```\nDeploy to the OpenFaaS gateway\n```bash\n$ faas-cli deploy -f stripe-slack-notifier.yml --gateway ${OPENFAAS_GATEWAY_URL}\n```\n"
},
{
"alpha_fraction": 0.581632673740387,
"alphanum_fraction": 0.5918367505073547,
"avg_line_length": 27.45161247253418,
"blob_id": "e86183972ccb8fb50cf10d035aef99aab8f75cfb",
"content_id": "d3a518438325d4dbf9443daac42fede715d4f968",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1764,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 62,
"path": "/stripe-slack-notifier/handler.py",
"repo_name": "mehyedes/stripe-slack-notifier",
"src_encoding": "UTF-8",
"text": "import stripe\nfrom babel import numbers\nfrom slack_webhook import Slack\n\ndef fetch_secret(secret_name):\n secret_file = open(f\"/var/openfaas/secrets/{secret_name}\", 'r')\n return secret_file.read()\n\n\ndef handle(event, context):\n\n # Make sure to create the secrets below\n webhook_url = fetch_secret(\"slack-webhook-url\")\n stripe.api_key = fetch_secret(\"stripe-secret-key\")\n webhook_secret = fetch_secret(\"webhook-secret\")\n \n payload = event.body\n received_sig = event.headers.get(\"Stripe-Signature\", None)\n \n try:\n event = stripe.Webhook.construct_event(\n payload, received_sig, webhook_secret\n )\n except ValueError:\n print(\"Error while decoding event!\")\n return {\n \"body\": \"Bad payload\",\n \"statusCode\": 400\n }\n except stripe.error.SignatureVerificationError:\n print(\"Invalid signature!\")\n return {\n \"body\": \"Bad signature\", \n \"statusCode\": 400\n }\n\n # Fail for all other event types \n if event.type != \"charge.succeeded\":\n return {\n \"body\":\"Unsupported event type\",\n \"statusCode\": 422\n }\n \n amount = numbers.format_currency(\n event.data.object.amount / 100,\n event.data.object.currency.upper(), \n locale='en'\n )\n\n try:\n slack = Slack(url=webhook_url)\n slack.post(text=f\"You have a received a new payment of {amount} :moneybag: :tada:\")\n except:\n print(\"An error occured when trying to send slack message.\")\n return {\n \"body\": \"Could not send slack message\", \n \"statusCode\": 500\n }\n return {\n \"body\": \"Notification was sent successfully to Slack\", \n \"statusCode\": 200\n }\n"
},
{
"alpha_fraction": 0.8484848737716675,
"alphanum_fraction": 0.8484848737716675,
"avg_line_length": 7.25,
"blob_id": "398cf465404dd94dfa5c6363a903cf6460a7d6ff",
"content_id": "debfd087ae61961d6689d8852694c15d17051624",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 33,
"license_type": "no_license",
"max_line_length": 13,
"num_lines": 4,
"path": "/stripe-slack-notifier/requirements.txt",
"repo_name": "mehyedes/stripe-slack-notifier",
"src_encoding": "UTF-8",
"text": "slack-webhook\nstripe\nflask\nBabel\n"
}
] | 3 |
Pankhuri1999/Gender-and-age-classifier | https://github.com/Pankhuri1999/Gender-and-age-classifier | 1f45dea5616acd45c87beab55f09fd8110ccfad0 | d42fb86e712b5f9d59f2e9092024f8c51fe648cb | e3fec0576b74519eda3060ca3f4027fd1d6f81f0 | refs/heads/main | 2023-01-13T17:04:54.082525 | 2020-11-15T14:54:42 | 2020-11-15T14:54:42 | 313,049,217 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4826072156429291,
"alphanum_fraction": 0.49966228008270264,
"avg_line_length": 42.83333206176758,
"blob_id": "88dda70e2228fc69fcae4119fa255ac44eaabf03",
"content_id": "3664cd4b7ac15a9ae7843dbf89afec80dc60e5e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5922,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 132,
"path": "/wide_resnet.py",
"repo_name": "Pankhuri1999/Gender-and-age-classifier",
"src_encoding": "UTF-8",
"text": "\r\nimport logging\r\nimport sys\r\nimport numpy as np\r\nfrom keras.models import Model\r\nfrom keras.layers import Input, Activation, add, Dense, Flatten, Dropout\r\nfrom keras.layers.convolutional import Conv2D, AveragePooling2D\r\nfrom keras.layers.normalization import BatchNormalization\r\nfrom keras.regularizers import l2\r\nfrom keras import backend as K\r\nfrom tensorflow.keras.models import Sequential \r\n\r\nsys.setrecursionlimit(2 ** 20)\r\nnp.random.seed(2 ** 10)\r\n\r\n\r\nclass NeuralNetwork:\r\n def __init__(self, areaOfImage, height=16, k=8):\r\n self._height = height\r\n self._k = k\r\n self._dropout_probability = 0\r\n self._weight_decay = 0.0005\r\n self._use_bias = False\r\n self._weight_init = \"normal\"\r\n \r\n if K.image_data_format() == \"th\":\r\n logging.debug(\"image_dim_ordering = 'th'\")\r\n self.lineOfChannel = 1\r\n self.inputSides = (3, areaOfImage, areaOfImage)\r\n else:\r\n logging.debug(\"image_dim_ordering = 'tf'\")\r\n self.lineOfChannel = -1\r\n self.inputSides = (areaOfImage, areaOfImage, 3)\r\n\r\n # Wide residual network \r\n def _wide_basic(self, areaOfInput, areaOfOutput, stride):\r\n def f(net):\r\n\r\n conv_params = [[3, 3, stride, \"same\"],\r\n [3, 3, (1, 1), \"same\"]]\r\n\r\n n_bottleneck_plane = areaOfOutput\r\n\r\n # Residual block\r\n for i, v in enumerate(conv_params):\r\n if i == 0:\r\n if areaOfInput != areaOfOutput:\r\n net = BatchNormalization(axis=self.lineOfChannel)(net)\r\n net = Activation(\"relu\")(net)\r\n convs = net\r\n else:\r\n convs = BatchNormalization(axis=self.lineOfChannel)(net)\r\n convs = Activation(\"relu\")(convs)\r\n\r\n convs = Conv2D(n_bottleneck_plane, kernel_size=(v[0], v[1]),\r\n strides=v[2],\r\n padding=v[3],\r\n kernel_initializer=self._weight_init,\r\n kernel_regularizer=l2(self._weight_decay),\r\n use_bias=self._use_bias)(convs)\r\n else:\r\n convs = BatchNormalization(axis=self.lineOfChannel)(convs)\r\n convs = Activation(\"relu\")(convs)\r\n if 
self._dropout_probability > 0:\r\n convs = Dropout(self._dropout_probability)(convs)\r\n convs = Conv2D(n_bottleneck_plane, kernel_size=(v[0], v[1]),\r\n strides=v[2],\r\n padding=v[3],\r\n kernel_initializer=self._weight_init,\r\n kernel_regularizer=l2(self._weight_decay),\r\n use_bias=self._use_bias)(convs)\r\n\r\n if areaOfInput != areaOfOutput:\r\n shortcut = Conv2D(areaOfOutput, kernel_size=(1, 1),\r\n strides=stride,\r\n padding=\"same\",\r\n kernel_initializer=self._weight_init,\r\n kernel_regularizer=l2(self._weight_decay),\r\n use_bias=self._use_bias)(net)\r\n else:\r\n shortcut = net\r\n\r\n return add([convs, shortcut])\r\n\r\n return f\r\n\r\n\r\n # Stacking Residual Units \r\n def _layer(self, block, areaOfInput, areaOfOutput, count, stride):\r\n def f(net):\r\n net = block(areaOfInput, areaOfOutput, stride)(net)\r\n for i in range(2, int(count + 1)):\r\n net = block(areaOfOutput, areaOfOutput, stride=(1, 1))(net)\r\n return net\r\n\r\n return f\r\n\r\n\r\n def __call__(self):\r\n logging.debug(\"Creating model...\")\r\n\r\n assert ((self._height - 4) % 6 == 0)\r\n n = (self._height - 4) / 6\r\n\r\n inputs = Input(shape=self.inputSides)\r\n\r\n n_stages = [16, 16 * self._k, 32 * self._k, 64 * self._k]\r\n\r\n conv1 = Conv2D(filters=n_stages[0], kernel_size=(3, 3),\r\n strides=(1, 1),\r\n padding=\"same\",\r\n kernel_initializer=self._weight_init,\r\n kernel_regularizer=l2(self._weight_decay),\r\n use_bias=self._use_bias)(inputs) \r\n # Adding wide residual blocks\r\n block_fn = self._wide_basic\r\n conv2 = self._layer(block_fn, areaOfInput=n_stages[0], areaOfOutput=n_stages[1], count=n, stride=(1, 1))(conv1)\r\n conv3 = self._layer(block_fn, areaOfInput=n_stages[1], areaOfOutput=n_stages[2], count=n, stride=(2, 2))(conv2)\r\n conv4 = self._layer(block_fn, areaOfInput=n_stages[2], areaOfOutput=n_stages[3], count=n, stride=(2, 2))(conv3)\r\n batch_norm = BatchNormalization(axis=self.lineOfChannel)(conv4)\r\n relu = 
Activation(\"relu\")(batch_norm)\r\n\r\n # Code for Classifier block\r\n pool = AveragePooling2D(pool_size=(8, 8), strides=(1, 1), padding=\"same\")(relu)\r\n flatten = Flatten()(pool)\r\n predictions_g = Dense(units=2, kernel_initializer=self._weight_init, use_bias=self._use_bias,\r\n kernel_regularizer=l2(self._weight_decay), activation=\"softmax\")(flatten)\r\n predictions_a = Dense(units=101, kernel_initializer=self._weight_init, use_bias=self._use_bias,\r\n kernel_regularizer=l2(self._weight_decay), activation=\"softmax\")(flatten)\r\n\r\n model = Model(inputs=inputs, outputs=[predictions_g, predictions_a])\r\n\r\n return model\r\n\r\n"
},
{
"alpha_fraction": 0.7241379022598267,
"alphanum_fraction": 0.7471264600753784,
"avg_line_length": 16.399999618530273,
"blob_id": "81ca9b004ec071b44a3d4ae73367c8ebef108843",
"content_id": "f02e8f9e124166ff55b407c32faa7c9b4b975b49",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 87,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 5,
"path": "/README.md",
"repo_name": "Pankhuri1999/Gender-and-age-classifier",
"src_encoding": "UTF-8",
"text": "# Gender-and-age-classifier\n\n1) First run wide_resnet.py\n\n2) Then run realtime_demo.py\n"
}
] | 2 |
gabywang/cs61a | https://github.com/gabywang/cs61a | c5c0af4bba846a9cd2368809c54642c56ae70b83 | c4100207e96bc39bdda24124e91f3b8754d1dd9e | b11bcdd60b3fde564c95cd4e6786aa79ada765df | refs/heads/master | 2018-12-18T21:22:08.331284 | 2018-09-14T23:57:26 | 2018-09-14T23:57:26 | 103,431,366 | 2 | 4 | null | null | null | null | null | [
{
"alpha_fraction": 0.7826887369155884,
"alphanum_fraction": 0.7882136106491089,
"avg_line_length": 59.44444274902344,
"blob_id": "ade73a27b93c024ff20638f27d1c9a3ca6fd7127",
"content_id": "91cee93fd5a5c9012479b0d4e10376e4e222fde1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 543,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 9,
"path": "/projects/ants/README.md",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "# Project3: Ants Vs. SomeBees\nhttps://cs61a.org/proj/ants/\n\n- In this project, you will create a tower defense game called Ants Vs. SomeBees.\nAs the ant queen, you populate your colony with the bravest ants you can muster.\nYour ants must protect their queen from the evil bees that invade your territory.\nIrritate the bees enough by throwing leaves at them, and they will be vanquished.\nFail to pester the airborne intruders adequately, and your queen will succumb to the bees' wrath.\nThis game is inspired by PopCap Games' Plants Vs. Zombies."
},
{
"alpha_fraction": 0.4379584491252899,
"alphanum_fraction": 0.46301954984664917,
"avg_line_length": 21.801393508911133,
"blob_id": "3001511c19ea9bef3ec3a251586a111ea3d264f0",
"content_id": "2ea8f66da7c6e695a0e93851c95a372e71f7cacf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6544,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 287,
"path": "/demo/19.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "class Tree:\n \"\"\"A tree is a root value and a list of branches.\"\"\"\n def __init__(self, label, branches=[]):\n self.label = label\n for branch in branches:\n assert isinstance(branch, Tree)\n self.branches = list(branches)\n\n def __repr__(self):\n if self.branches:\n branch_str = ', ' + repr(self.branches)\n else:\n branch_str = ''\n return 'Tree({0}{1})'.format(self.label, branch_str)\n\n def __str__(self):\n return '\\n'.join(self.indented())\n\n def indented(self, k=0):\n indented = []\n for b in self.branches:\n for line in b.indented(k + 1):\n indented.append(' ' + line)\n return [str(self.label)] + indented\n\n def is_leaf(self):\n return not self.branches\n\ndef memo(f):\n cache = {}\n def memoized(n):\n if n not in cache:\n cache[n] = f(n)\n return cache[n]\n return memoized\n\n@memo\ndef fib_tree(n):\n \"\"\"A Fibonacci tree.\n\n >>> print(fib_tree(4))\n 3\n 1\n 0\n 1\n 2\n 1\n 1\n 0\n 1\n \"\"\"\n if n == 0 or n == 1:\n return Tree(n)\n else:\n left = fib_tree(n-2)\n right = fib_tree(n-1)\n fib_n = left.label + right.label\n return Tree(fib_n, [left, right])\n\ndef leaves(tree):\n \"\"\"Return the leaf values of a tree.\n\n >>> leaves(fib_tree(4))\n [0, 1, 1, 0, 1]\n \"\"\"\n if tree.is_leaf():\n return [tree.label]\n else:\n return sum([leaves(b) for b in tree.branches], [])\n\ndef height(tr):\n \"\"\"The height of TR.\"\"\"\n if tr.is_leaf():\n return 0\n else:\n return 1 + max([height(b) for b in tr.branches])\n\ndef prune(t, n):\n \"\"\"Prune sub-trees whose label value is n.\n\n >>> t = fib_tree(5)\n >>> prune(t, 1)\n >>> print(t)\n 5\n 2\n 3\n 2\n \"\"\"\n t.branches = [b for b in t.branches if b.label != n]\n for b in t.branches:\n prune(b, n)\n\ndef prune_repeats(t, seen):\n \"\"\"Remove repeated sub-trees\n\n >>> def fib_tree(n):\n ... if n == 0 or n == 1:\n ... return Tree(n)\n ... else:\n ... left = fib_tree(n-2)\n ... right = fib_tree(n-1)\n ... 
return Tree(left.label + right.label, (left, right))\n >>> fib_tree = memo(fib_tree)\n >>> t = fib_tree(6)\n >>> print(t)\n 8\n 3\n 1\n 0\n 1\n 2\n 1\n 1\n 0\n 1\n 5\n 2\n 1\n 1\n 0\n 1\n 3\n 1\n 0\n 1\n 2\n 1\n 1\n 0\n 1\n >>> prune_repeats(t, [])\n >>> print(t)\n 8\n 3\n 1\n 0\n 1\n 2\n 5\n \"\"\"\n t.branches = [b for b in t.branches if b not in seen]\n seen.append(t)\n for b in t.branches:\n prune_repeats(b, seen)\n\n\n\n\n\n\n\n\n\n\ndef hailstone(n):\n \"\"\"Print a hailstone sequence and return its length.\n\n >>> a = hailstone(10)\n 10\n 5\n 16\n 8\n 4\n 2\n 1\n >>> a\n 7\n \"\"\"\n print(n)\n if n == 1:\n return 1\n elif n % 2 == 0:\n return 1 + hailstone(n//2)\n else:\n return 1 + hailstone(3*n+1)\n\ndef is_int(x):\n return int(x) == x\n\ndef is_odd(n):\n return n % 2 == 1\n\ndef hailstone_tree(k, n=1):\n \"\"\"Build a tree in which paths are hailstone sequences.\n\n >>> hailstone_tree(6)\n Tree(1, [Tree(2, [Tree(4, [Tree(8, [Tree(16, [Tree(32), Tree(5)])])])])])\n >>> leaves(hailstone_tree(11))\n [1024, 170, 168, 160, 26, 24]\n \"\"\"\n if k == 1:\n return Tree(n)\n else:\n up, down = 2*n, (n-1)/3\n branches = [hailstone_tree(k-1, up)]\n if down > 1 and is_int(down) and is_odd(down):\n branches.append(hailstone_tree(k-1, int(down)))\n return Tree(n, branches)\n\ndef longest_path_below(k, t):\n \"\"\"Return the longest path through t of values all down than k.\n\n >>> longest_path_below(20, hailstone_tree(10))\n [1, 2, 4, 8, 16, 5, 10, 3, 6, 12]\n \"\"\"\n if t.label >= k:\n return []\n elif t.is_leaf():\n return [t.label]\n else:\n paths = [longest_path_below(k, b) for b in t.branches]\n return [t.label] + max(paths, key=len)\n\n# Printing a tree laid out horizontally. This is quite tricky. The\n# solution below is not particularly efficient, and uses features of\n# Python we have not yet covered. 
By all means take a look at it,\n# but we don't expect you to understand it comletely.\n\nfrom io import StringIO\n# A StringIO is a file-like object that builds a string instead of printing\n# anything out.\n\ndef width(tr):\n \"\"\"Returns the printed width of this tree.\"\"\"\n lbl_wid = len(str(tr.label))\n w = max(lbl_wid,\n sum([width(t) for t in tr.branches]) + len(tr.branches) - 1)\n extra = (w - lbl_wid) % 2\n return w + extra\n\ndef pretty(tree):\n \"\"\"Print TREE laid out horizontally rather than vertically.\"\"\"\n\n def gen_levels(tr):\n w = width(tr)\n lbl = str(tr.label)\n lbl_pad = \" \" * ((w - len(lbl)) // 2)\n yield w\n print(lbl_pad, file=out, end=\"\")\n print(lbl, file=out, end=\"\")\n print(lbl_pad, file=out, end=\"\")\n yield \n\n if tr.is_leaf():\n pad = \" \" * w\n while True:\n print(pad, file=out, end=\"\")\n yield\n below = [ gen_levels(b) for b in tr.branches ]\n L = 0\n for g in below:\n if L > 0:\n print(\" \", end=\"\", file=out)\n L += 1\n w1 = next(g)\n left = (w1-1) // 2\n right = w1 - left - 1\n mid = L + left\n print(\" \" * left, end=\"\", file=out)\n if mid*2 + 1 == w:\n print(\"|\", end=\"\", file=out)\n elif mid*2 > w:\n print(\"\\\\\", end=\"\", file=out)\n else:\n print(\"/\", end=\"\", file=out)\n print(\" \" * right, end=\"\", file=out)\n L += w1\n print(\" \" * (w - L), end=\"\", file=out)\n yield\n while True:\n started = False\n for g in below:\n if started:\n print(\" \", end=\"\", file=out)\n next(g);\n started = True\n print(\" \" * (w - L), end=\"\", file=out)\n yield\n\n out = StringIO()\n h = height(tree)\n g = gen_levels(tree)\n next(g)\n for i in range(2*h + 1):\n next(g)\n print(file=out)\n print(out.getvalue(), end=\"\")\n"
},
{
"alpha_fraction": 0.7163375020027161,
"alphanum_fraction": 0.7271094918251038,
"avg_line_length": 41.846153259277344,
"blob_id": "14bb391de514b32d709a69f769eafa059a3cddd9",
"content_id": "e29f6380337088ead86c0711ab08497491bc8274",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 557,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 13,
"path": "/hw/hw05/README.md",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "# My comprehension of Towers of Hanoi \n\nTo solve this problem, we simply need to consider two cases:\n- The simpliest case: n = 1.\n- Consider the relationship between n and (n-1).\n\nIt's obvious that for case n, the last step is to move from its __current place__ to the __end__. \nHence, case n-1 should be in the alternative place. The order should be:\n- n-1 move to the alternative place.\n- n move to the end. And we have `print_move(start, end)`\n- n-1 move from the alternative place to the end.\n\nHope this may help you have a better comprehension of Q2.\n"
},
{
"alpha_fraction": 0.6646706461906433,
"alphanum_fraction": 0.7604790329933167,
"avg_line_length": 32.400001525878906,
"blob_id": "3a0155f550cf1445309ff1e5db428a0064fbffa7",
"content_id": "5b854db1e7b6782c6a2fdffb638fb96d040f9542",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 167,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 5,
"path": "/README.md",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "# cs61a\nucb_cs61a_fall2017@gabywang\n\n- This repository saves my homework, labs, and projects for CS 61A Fall 2017.\n- The website for this course is https://cs61a.org/\n"
},
{
"alpha_fraction": 0.4347408711910248,
"alphanum_fraction": 0.46506717801094055,
"avg_line_length": 27.010753631591797,
"blob_id": "af3a9498efe7300f46122f9002416c4804b52025",
"content_id": "9a0895f0d465557606a13b4c4b0f72c9604ad808",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5210,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 186,
"path": "/lab/lab08/lab08.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "def deep_len(lnk):\n \"\"\" Returns the deep length of a possibly deep linked list.\n >>> deep_len(Link(1, Link(2, Link(3))))\n 3\n >>> deep_len(Link(Link(1, Link(2)), Link(3, Link(4))))\n 4\n >>> levels = Link(Link(Link(1, Link(2)), \\\n Link(3)), Link(Link(4), Link(5)))\n >>> print(levels)\n <<<1 2> 3> <4> 5>\n >>> deep_len(levels)\n 5\n \"\"\"\n if lnk is Link.empty:\n return 0\n elif not isinstance(lnk, Link):\n return 1\n else:\n return deep_len(lnk.first) + deep_len(lnk.rest)\n\ndef make_to_string(front, mid, back, empty_repr):\n \"\"\" Returns a function that turns linked lists to strings.\n\n >>> kevins_to_string = make_to_string(\"[\", \"|-]-->\", \"\", \"[]\")\n >>> jerrys_to_string = make_to_string(\"(\", \" . \", \")\", \"()\")\n >>> lst = Link(1, Link(2, Link(3, Link(4))))\n >>> kevins_to_string(lst)\n '[1|-]-->[2|-]-->[3|-]-->[4|-]-->[]'\n >>> kevins_to_string(Link.empty)\n '[]'\n >>> jerrys_to_string(lst)\n '(1 . (2 . (3 . (4 . ()))))'\n >>> jerrys_to_string(Link.empty)\n '()'\n \"\"\"\n def printer(lnk):\n if lnk is Link.empty:\n return empty_repr\n else:\n return front + str(lnk.first) + mid + printer(lnk.rest) + back\n return printer\n\ndef tree_map(fn, t):\n \"\"\"Maps the function fn over the entries of t and returns the\n result in a new tree.\n\n >>> numbers = Tree(1,\n ... [Tree(2,\n ... [Tree(3),\n ... Tree(4)]),\n ... Tree(5,\n ... [Tree(6,\n ... [Tree(7)]),\n ... Tree(8)])])\n >>> print(tree_map(lambda x: 2**x, numbers))\n 2\n 4\n 8\n 16\n 32\n 64\n 128\n 256\n \"\"\"\n return Tree(fn(t.label), [tree_map(fn, b) for b in t.branches])\n\ndef add_trees(t1, t2):\n \"\"\"\n >>> numbers = Tree(1,\n ... [Tree(2,\n ... [Tree(3),\n ... Tree(4)]),\n ... Tree(5,\n ... [Tree(6,\n ... [Tree(7)]),\n ... 
Tree(8)])])\n >>> print(add_trees(numbers, numbers))\n 2\n 4\n 6\n 8\n 10\n 12\n 14\n 16\n >>> print(add_trees(Tree(2), Tree(3, [Tree(4), Tree(5)])))\n 5\n 4\n 5\n >>> print(add_trees(Tree(2, [Tree(3)]), Tree(2, [Tree(3), Tree(4)])))\n 4\n 6\n 4\n >>> print(add_trees(Tree(2, [Tree(3, [Tree(4), Tree(5)])]), \\\n Tree(2, [Tree(3, [Tree(4)]), Tree(5)])))\n 4\n 6\n 8\n 5\n 5\n \"\"\"\n if not t1:\n return t2.copy_tree()\n if not t2:\n return t1.copy_tree()\n new_label = t1.label + t2.label\n t1_branches, t2_branches = list(t1.branches), list(t2.branches)\n length_t1, length_t2 = len(t1_branches), len(t2_branches)\n if length_t1 < length_t2:\n t1_branches += [None for _ in range(length_t1, length_t2)]\n elif length_t1 > length_t2:\n t2_branches += [None for _ in range(length_t2, length_t1)]\n return Tree(new_label, [add_trees(branch1, branch2) for branch1, branch2 in zip(t1_branches, t2_branches)])\n\n# Link\nclass Link:\n \"\"\"A linked list.\n\n >>> s = Link(1, Link(2, Link(3)))\n >>> s.first\n 1\n >>> s.rest\n Link(2, Link(3))\n \"\"\"\n empty = ()\n\n def __init__(self, first, rest=empty):\n assert rest is Link.empty or isinstance(rest, Link)\n self.first = first\n self.rest = rest\n\n def __repr__(self):\n if self.rest is Link.empty:\n return 'Link({})'.format(self.first)\n else:\n return 'Link({}, {})'.format(self.first, repr(self.rest))\n\n def __str__(self):\n \"\"\"Returns a human-readable string representation of the Link\n\n >>> s = Link(1, Link(2, Link(3, Link(4))))\n >>> str(s)\n '<1 2 3 4>'\n >>> str(Link(1))\n '<1>'\n >>> str(Link.empty) # empty tuple\n '()'\n \"\"\"\n string = '<'\n while self.rest is not Link.empty:\n string += str(self.first) + ' '\n self = self.rest\n return string + str(self.first) + '>'\n\n# Tree\nclass Tree:\n def __init__(self, label, branches=[]):\n for c in branches:\n assert isinstance(c, Tree)\n self.label = label\n self.branches = list(branches)\n\n def __repr__(self):\n if self.branches:\n branches_str = ', ' + 
repr(self.branches)\n else:\n branches_str = ''\n return 'Tree({0}{1})'.format(self.label, branches_str)\n\n def is_leaf(self):\n return not self.branches\n\n def __eq__(self, other):\n return type(other) is type(self) and self.label == other.label \\\n and self.branches == other.branches\n\n def __str__(self):\n def print_tree(t, indent=0):\n tree_str = ' ' * indent + str(t.label) + \"\\n\"\n for b in t.branches:\n tree_str += print_tree(b, indent + 1)\n return tree_str\n return print_tree(self).rstrip()\n\n def copy_tree(self):\n return Tree(self.label, [b.copy_tree() for b in self.branches])\n"
},
{
"alpha_fraction": 0.5639424920082092,
"alphanum_fraction": 0.587356448173523,
"avg_line_length": 32.09593963623047,
"blob_id": "fe99075e4789ca436961076a093d125c1d7e7f52",
"content_id": "f1c4b919a179a8eae45c4cfda2dc484eab69031d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8969,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 271,
"path": "/hw/hw05/hw05.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "# Tree definition\n\ndef tree(label, branches=[]):\n \"\"\"Construct a tree with the given label value and a list of branches.\"\"\"\n for branch in branches:\n assert is_tree(branch), 'branches must be trees'\n return [label] + list(branches)\n\ndef label(tree):\n \"\"\"Return the label value of a tree.\"\"\"\n return tree[0]\n\ndef branches(tree):\n \"\"\"Return the list of branches of the given tree.\"\"\"\n return tree[1:]\n\ndef is_tree(tree):\n \"\"\"Returns True if the given tree is a tree, and False otherwise.\"\"\"\n if type(tree) != list or len(tree) < 1:\n return False\n for branch in branches(tree):\n if not is_tree(branch):\n return False\n return True\n\ndef is_leaf(tree):\n \"\"\"Returns True if the given tree's list of branches is empty, and False\n otherwise.\n \"\"\"\n return not branches(tree)\n\ndef print_tree(t, indent=0):\n \"\"\"Print a representation of this tree in which each node is\n indented by two spaces times its depth from the root.\n\n >>> print_tree(tree(1))\n 1\n >>> print_tree(tree(1, [tree(2)]))\n 1\n 2\n >>> numbers = tree(1, [tree(2), tree(3, [tree(4), tree(5)]), tree(6, [tree(7)])])\n >>> print_tree(numbers)\n 1\n 2\n 3\n 4\n 5\n 6\n 7\n \"\"\"\n print(' ' * indent + str(label(t)))\n for b in branches(t):\n print_tree(b, indent + 1)\n\ndef copy_tree(t):\n \"\"\"Returns a copy of t. Only for testing purposes.\n\n >>> t = tree(5)\n >>> copy = copy_tree(t)\n >>> t = tree(6)\n >>> print_tree(copy)\n 5\n \"\"\"\n return tree(label(t), [copy_tree(b) for b in branches(t)])\n\ndef replace_leaf(t, old, new):\n \"\"\"Returns a new tree where every leaf value equal to old has\n been replaced with new.\n\n >>> yggdrasil = tree('odin',\n ... [tree('balder',\n ... [tree('thor'),\n ... tree('loki')]),\n ... tree('frigg',\n ... [tree('thor')]),\n ... tree('thor',\n ... [tree('sif'),\n ... tree('thor')]),\n ... 
tree('thor')])\n >>> laerad = copy_tree(yggdrasil) # copy yggdrasil for testing purposes\n >>> print_tree(replace_leaf(yggdrasil, 'thor', 'freya'))\n odin\n balder\n freya\n loki\n frigg\n freya\n thor\n sif\n freya\n freya\n >>> laerad == yggdrasil # Make sure original tree is unmodified\n True\n \"\"\"\n \"*** YOUR CODE HERE ***\"\n if is_leaf(t) and label(t) == old:\n return tree(new)\n else:\n new_branches = [replace_leaf(b, old, new) for b in branches(t)]\n return tree(label(t), new_branches)\n\ndef print_move(origin, destination):\n \"\"\"Print instructions to move a disk.\"\"\"\n print(\"Move the top disk from rod\", origin, \"to rod\", destination)\n\ndef move_stack(n, start, end):\n \"\"\"Print the moves required to move n disks on the start pole to the end\n pole without violating the rules of Towers of Hanoi.\n\n n -- number of disks\n start -- a pole position, either 1, 2, or 3\n end -- a pole position, either 1, 2, or 3\n\n There are exactly three poles, and start and end must be different. 
Assume\n that the start pole has at least n disks of increasing size, and the end\n pole is either empty or has a top disk larger than the top n start disks.\n\n >>> move_stack(1, 1, 3)\n Move the top disk from rod 1 to rod 3\n >>> move_stack(2, 1, 3)\n Move the top disk from rod 1 to rod 2\n Move the top disk from rod 1 to rod 3\n Move the top disk from rod 2 to rod 3\n >>> move_stack(3, 1, 3)\n Move the top disk from rod 1 to rod 3\n Move the top disk from rod 1 to rod 2\n Move the top disk from rod 3 to rod 2\n Move the top disk from rod 1 to rod 3\n Move the top disk from rod 2 to rod 1\n Move the top disk from rod 2 to rod 3\n Move the top disk from rod 1 to rod 3\n \"\"\"\n assert 1 <= start <= 3 and 1 <= end <= 3 and start != end, \"Bad start/end\"\n \"*** YOUR CODE HERE ***\"\n #lst = [[] for x in range(3)]\n #lst[start-1] = [x for x in range(n)]\n \"\"\"\n To solve this problem, we simply need to consider two cases:\n The simpliest case: n = 1.\n Consider the relationship between n and (n-1).\n It's obvious that for case n, the last step is to move from its current place \\\n to the end. Hence, case n-1 should be in the alternative place. \\\n The order should be:\n n-1 move to the alternative place.\n n move to the end. 
And we have print_move(start, end)\n n-1 move from the alternative place to the end.\n \"\"\"\n\n rods = [1, 2, 3]\n if n > 0:\n alter = [x for x in rods if x != start and x != end][0]\n move_stack(n-1, start, alter)\n print_move(start, end)\n move_stack(n-1, alter, end)\n\ndef interval(a, b):\n \"\"\"Construct an interval from a to b.\"\"\"\n return [a, b]\n\ndef lower_bound(x):\n \"\"\"Return the lower bound of interval x.\"\"\"\n \"*** YOUR CODE HERE ***\"\n return x[0]\n\ndef upper_bound(x):\n \"\"\"Return the upper bound of interval x.\"\"\"\n \"*** YOUR CODE HERE ***\"\n return x[1]\n\ndef str_interval(x):\n \"\"\"Return a string representation of interval x.\"\"\"\n return '{0} to {1}'.format(lower_bound(x), upper_bound(x))\n\ndef add_interval(x, y):\n \"\"\"Return an interval that contains the sum of any value in interval x and\n any value in interval y.\"\"\"\n lower = lower_bound(x) + lower_bound(y)\n upper = upper_bound(x) + upper_bound(y)\n return interval(lower, upper)\n\ndef mul_interval(x, y):\n \"\"\"Return the interval that contains the product of any value in x and any\n value in y.\"\"\"\n p1 = lower_bound(x) * lower_bound(y)\n p2 = lower_bound(x) * upper_bound(y)\n p3 = upper_bound(x) * lower_bound(y)\n p4 = upper_bound(x) * upper_bound(y)\n return interval(min(p1, p2, p3, p4), max(p1, p2, p3, p4))\n\ndef sub_interval(x, y):\n \"\"\"Return the interval that contains the difference between any value in x\n and any value in y.\"\"\"\n \"*** YOUR CODE HERE ***\"\n p1 = lower_bound(x) - lower_bound(y)\n p2 = lower_bound(x) - upper_bound(y)\n p3 = upper_bound(x) - lower_bound(y)\n p4 = upper_bound(x) - upper_bound(y)\n return interval(min(p1, p2, p3, p4), max(p1, p2, p3, p4))\n\ndef div_interval(x, y):\n \"\"\"Return the interval that contains the quotient of any value in x divided by\n any value in y. 
Division is implemented as the multiplication of x by the\n reciprocal of y.\"\"\"\n \"*** YOUR CODE HERE ***\"\n assert not (lower_bound(y) < 0 and upper_bound(y) > 0)\n reciprocal_y = interval(1/upper_bound(y), 1/lower_bound(y))\n return mul_interval(x, reciprocal_y)\n\ndef par1(r1, r2):\n return div_interval(mul_interval(r1, r2), add_interval(r1, r2))\n\ndef par2(r1, r2):\n one = interval(1, 1)\n rep_r1 = div_interval(one, r1)\n rep_r2 = div_interval(one, r2)\n return div_interval(one, add_interval(rep_r1, rep_r2))\n\ndef check_par():\n \"\"\"Return two intervals that give different results for parallel resistors.\n\n >>> r1, r2 = check_par()\n >>> x = par1(r1, r2)\n >>> y = par2(r1, r2)\n >>> lower_bound(x) != lower_bound(y) or upper_bound(x) != upper_bound(y)\n True\n \"\"\"\n r1 = interval(1, 1) # Replace this line!\n r2 = interval(1, 2) # Replace this line!\n return par1(r1, r2), par2(r1,r2)\n\ndef multiple_references_explanation():\n return \"\"\"The multiple reference problem is caused by the type of interval.\n When we refer to the same inteval twice, we may assume two differtent true\n values for the same inteval. 
Hence, the result of inteval may become larger\n than it suppose to be.\n\n This is also the reason why par2 is better than par1, since it never refers\n to the same interval twice.\n \"\"\"\n\ndef quadratic(x, a, b, c):\n \"\"\"Return the interval that is the range of the quadratic defined by\n coefficients a, b, and c, for domain interval x.\n\n >>> str_interval(quadratic(interval(0, 2), -2, 3, -1))\n '-3 to 0.125'\n >>> str_interval(quadratic(interval(1, 3), 2, -3, 1))\n '0 to 10'\n \"\"\"\n \"*** YOUR CODE HERE ***\"\n extreme = - b / (2 * a)\n f = lambda t: a * t * t + b * t + c\n if extreme > lower_bound(x) and extreme < upper_bound(x):\n return interval(min(f(extreme), f(lower_bound(x)), f(upper_bound(x))), \\\n max(f(extreme), f(lower_bound(x)), f(upper_bound(x))))\n else:\n return interval(min(f(lower_bound(x)), f(upper_bound(x))), \\\n max(f(lower_bound(x)), f(upper_bound(x))))\n\ndef polynomial(x, c):\n \"\"\"Return the interval that is the range of the polynomial defined by\n coefficients c, for domain interval x.\n\n >>> str_interval(polynomial(interval(0, 2), [-1, 3, -2]))\n '-3 to 0.125'\n >>> str_interval(polynomial(interval(1, 3), [1, -3, 2]))\n '0 to 10'\n >>> str_interval(polynomial(interval(0.5, 2.25), [10, 24, -6, -8, 3]))\n '18.0 to 23.0'\n \"\"\"\n \"*** YOUR CODE HERE ***\"\n"
},
{
"alpha_fraction": 0.3589315414428711,
"alphanum_fraction": 0.39482471346855164,
"avg_line_length": 25.622222900390625,
"blob_id": "15215d6af0d10f170b6346ea11eccd34913aa892",
"content_id": "1eb607fb160399292cc9bae7633cceeffd965117",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2396,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 90,
"path": "/projects/hog/tests/06.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "test = {\n 'name': 'Question 6',\n 'points': 2,\n 'suites': [\n {\n 'cases': [\n {\n 'code': r\"\"\"\n >>> #\n >>> def echo(s0, s1):\n ... print(s0, s1)\n ... return echo\n >>> s0, s1 = play(always_roll(0), always_roll(0), goal=2, say=echo)\n 1 0\n 1 2\n \"\"\",\n 'hidden': False,\n 'locked': False\n },\n {\n 'code': r\"\"\"\n >>> #\n >>> # Ensure that say is properly updated within the body of play.\n >>> def total(s0, s1):\n ... print(s0 + s1)\n ... return echo\n >>> def echo(s0, s1):\n ... print(s0, s1)\n ... return total\n >>> s0, s1 = play(always_roll(0), always_roll(0), goal=7, say=echo)\n 1 0\n 3\n 2 4\n 9\n \"\"\",\n 'hidden': False,\n 'locked': False\n }\n ],\n 'scored': True,\n 'setup': r\"\"\"\n >>> from hog import play, always_roll\n \"\"\",\n 'teardown': '',\n 'type': 'doctest'\n },\n {\n 'cases': [\n {\n 'code': r\"\"\"\n >>> #\n >>> def echo_0(s0, s1):\n ... print('*', s0)\n ... return echo_0\n >>> def echo_1(s0, s1):\n ... print('**', s1)\n ... return echo_1\n >>> s0, s1 = play(always_roll(0), always_roll(0), goal=1, say=both(echo_0, echo_1))\n * 1\n ** 0\n \"\"\",\n 'hidden': False,\n 'locked': False\n },\n {\n 'code': r\"\"\"\n >>> #\n >>> s0, s1 = play(always_roll(0), always_roll(0), goal=10, say=both(say_scores, announce_lead_changes()))\n Player 0 now has 1 and Player 1 now has 0\n Player 0 takes the lead by 1\n Player 0 now has 1 and Player 1 now has 2\n Player 1 takes the lead by 1\n Player 0 now has 2 and Player 1 now has 4\n Player 0 now has 2 and Player 1 now has 7\n Player 0 now has 10 and Player 1 now has 7\n Player 0 takes the lead by 3\n \"\"\",\n 'hidden': False,\n 'locked': False\n }\n ],\n 'scored': True,\n 'setup': r\"\"\"\n >>> from hog import play, always_roll, both, announce_lead_changes, say_scores\n \"\"\",\n 'teardown': '',\n 'type': 'doctest'\n }\n ]\n}\n"
},
{
"alpha_fraction": 0.7164179086685181,
"alphanum_fraction": 0.7611940503120422,
"avg_line_length": 43.33333206176758,
"blob_id": "bda05e90e8a90ed57fd698ea3af6735c920f68c6",
"content_id": "0cb77e097881ff6edb74fa8c28800925ae1f2b8c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 134,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 3,
"path": "/projects/hog/calc.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "import zlib, base64\nexec(zlib.decompress(base64.b64decode('eJzNWVtv2zYUfvevIAwUlhLFtdpuD0Y5rHEbrZ3XS9J1CzJDUG3a1ixRqiTHcQP/95GSrHNI0UlTbMAebEg8F57rdyip2+2OkjhdFywnxZIRdpOyacFmZBNykgUFI8mcJJyRvJB3iy0JFkHI84IEPBECWb/b7Xb+/jhd//L7n6+8U3oWRDmDhXf0Y7ZG9yOar2O4LWiYS20BnyKmFzQObuD2jBbrNEL0G5pmIS9g4ZLGIYfbBX11M2VpESZo8SvNAr4ALTyjUZiDEu7RYpuyzjxLYjJNokjEQSjISRinSVYQHsRsVhlSryyTBagPqbjtz0MeRP4+Vp0Zm5OG48Kac3vYaRYyj97uOkThiayjhlwykxCoGSci5kQkBlRIFnR7BbwTOueKNsGZsWKd8QP8HZ3sRaoDidVwB9K2mt09geXOz+CuKvyHVfoj3OHPnwzbtninHbKiTzpkswwjRlZHq+eU1wHgj1aUDkpXDVJkdUzdtvHvDtvya2ULL+Wq/WRgdVtrhloxV3UEEIyx01yeo7gA/SdXdMsMFs6rBaThEdCEpyTJEDPQxoJmq2Z8BiUXSnFdU+g5C9J8AZevJ3O8z2ulsC4mol7uosv4mLU+pmCEaizyOHeSNBW4wgs/nyYZQ3F71jRVFlHoOqvnieuLQnRWz7nqlVJ5z+nVzRaWN5tlIv7TjF371WURxqwnfGlUJorKj2LDRmURRNFWyCyD3BemiSu+jv1MQEEuVSiufAJXzqTtQZ6zDJVQgehQHokNXaN09lm/3pQwAZ94vTQKzP8DgptYJY0OnFoWMn7qNJa3KmZutCz+LidatuiGH8MGLSuR07jOYkpdZD7ibhaPXc2nFTZUsvizcMr8ZF1Mk5iB6aNv9zKyTZx36JZhMUICdqGqWsQYYKooWUT6jEn7kkb0mRJtUeeI9glq4VPbaLujFN+oBle8FuwBF6l0m2x6LyBm54/cAfL28WN3IEeNrONvUzBWFIxrBSZjgOkYfP/UafYCK1R6+G24fV/6zpGVKNZziLUyIdtdEllV/ukBGxzAMqVomlVHVABQ5s6+KmAtcGQlwP1M65RUw609Gg1Utt/MbM1iqrIzqyyvmThP4SEkphOstyf0GVD7YcHi3LIRUKcUqb91h67zRPyeit8z8ftB/H4Uvx2S+O1OCczJDJxPa86nmPHLXYy1EVLg4FHjCwRy5ChB0jrwxFVQcN+TVcxuAf+H7g7V3htq3OnEdXDgG8LL8sSpjPUcCnCrTPg3+5xIMxSRGETeS5FmK7Xfl2Db3DLtF0ucUOKwbB1wX5Y60T2cNJYTOugcpCEE2MIp+L1SnCBpaNaXaiLfWGUbDqoedZ1FEkTUHQyUekcuo52yD1QVFrsV2VZBnLdgLqDEwBmYMeFDCQQD6H+R8LLvS+ht2DbfodQ9oLQKjwWuQvTelsGwj020TUWzH4uTPSsfyiBaH8SAJSpwB5QJuBOmVbMQCGFDANMh3cEE8Xo3FkMPO25/APkP1KS+LKu7fpybCbXaGD2l+8V+c8GTjYV6SuzWXJYAhikHpOGB6VSD56rddKB8A1uETft4OrTHGmY//DDyDspXmRNNuapHVlkj+ilhBCouQcU7B28LCRxMDkxC8wniYcpdpByPbqlcL8VFU4pZEKIDuLfBh8PWwUmHK++raC/XtQ2K3hpPmSMD8IzUtC6xoAl17s0vPv5cHgj4T1SqHhLom0H/B3mywv6O21yuznNu0NQxpjBWS0wv+q3utxK1gwN3a4zzjfYM0NwVFFv3wjxOoeeUc5H+hLyETS6VIfYCj1LYGkYUSrL6LGPSPbKRXzZg22W7lODtVraQo1+J1HtzpJTwfEX1hJAto4C9Z5P9OxQgK36egZvZ1Yk70XGkNfMXatffV95kj05mpNDKjNwxhrRzzpnS2lnFgczEMaCspFZ
NzxT7H1xUaJRxevWwykhtrSRTcyXKLCjeemPFW64G1BuDE2MB2yUR48q4nUXlnRd64PPGders51B1WnC9rB+kKUPvyLyxEhkt+mC3ZJomvAj5mpVDBFuJHvUV+SaEPLOV4vPOqzOmGiktqIoqrRBXtOV2K3YIk1cCiA8HRTzWClQ+SstlFXZXIin3iJrFXJNYGuR5zV3PYFWVwXVvPKkMuy9D5x0lzWmSWupYPZClr5Al74Nhwqq8bSxUydNIuIgwAOpsIXVLoPT9kIeF7wPpGp00FCz3rqvDKUZLbQeY/5X19++gjawHvLe60y6FanjPNtrbJo5Simk2emvffXUdROtAfiCRH4jeR8GWZeR2sOvl5Nbd3T7Z1ZxsRm6f7hyBBaJlhEw4I2LPz4JZiJU7d/uiueKgsHSj5fnSaS0ajv9YYNL3ffmC1/cNomX7XQ2H1olrHx0ZxR1TcGw9mZvvT6b3+j9LJsuyJINGe/1vJVK22az8OjgX0Ug2IV+Qcq/hX1wig0jwkNw+2/1PM8k9Sw+SbdRdkTqhjFlFpbTr+3EQct/vDpUnu95lss7kF0BSfvJrPpeKQOx6rTjIB0O78w/EtJ8C')))\n# Created by pyminifier (https://github.com/liftoff/pyminifier)\n\n"
},
{
"alpha_fraction": 0.5127737522125244,
"alphanum_fraction": 0.5164233446121216,
"avg_line_length": 20.076923370361328,
"blob_id": "a22bca31c6bfa0e460bd2e5a420af121f4c99f73",
"content_id": "803f27852cae28c63ec1b0ffed11eb49d1fca590",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 548,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 26,
"path": "/lab/lab06/vending_machine.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "def vending_machine(snacks):\n \"\"\"Cycles through sequence of snacks.\n\n >>> vender = vending_machine(('chips', 'chocolate', 'popcorn'))\n >>> vender()\n 'chips'\n >>> vender()\n 'chocolate'\n >>> vender()\n 'popcorn'\n >>> vender()\n 'chips'\n >>> other = vending_machine(('brownie',))\n >>> other()\n 'brownie'\n >>> vender()\n 'chocolate'\n \"\"\"\n index = 0\n def snack_out():\n nonlocal index\n out = snacks[index]\n index = (index + 1) % len(snacks)\n return out\n\n return snack_out\n"
},
{
"alpha_fraction": 0.39808017015457153,
"alphanum_fraction": 0.4528515040874481,
"avg_line_length": 22.932432174682617,
"blob_id": "12653a353855e4dd5d9cfb950ff36ba1e7d81e31",
"content_id": "fc2f907c7b14fe204bce6cfdf8fca0886c6f6fb6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1771,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 74,
"path": "/lab/lab13/lab13.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "## Generators\n\ndef make_generators_generator(g):\n \"\"\"Generates all the \"sub\"-generators of the generator returned by\n the generator function g.\n\n >>> def ints_to(n):\n ... for i in range(1, n + 1):\n ... yield i\n ...\n >>> def ints_to_5():\n ... for item in ints_to(5):\n ... yield item\n ...\n >>> for gen in make_generators_generator(ints_to_5):\n ... print(\"Next Generator:\")\n ... for item in gen:\n ... print(item)\n ...\n Next Generator:\n 1\n Next Generator:\n 1\n 2\n Next Generator:\n 1\n 2\n 3\n Next Generator:\n 1\n 2\n 3\n 4\n Next Generator:\n 1\n 2\n 3\n 4\n 5\n \"\"\"\n for i in g():\n yield range(1, i+1)\n\ndef permutations(lst):\n \"\"\"Generates all permutations of sequence LST. Each permutation is a\n list of the elements in LST in a different order.\n\n The order of the permutations does not matter.\n\n >>> sorted(permutations([1, 2, 3]))\n [[1, 2, 3], [1, 3, 2], [2, 1, 3], [2, 3, 1], [3, 1, 2], [3, 2, 1]]\n >>> type(permutations([1, 2, 3]))\n <class 'generator'>\n >>> sorted(permutations((10, 20, 30)))\n [[10, 20, 30], [10, 30, 20], [20, 10, 30], [20, 30, 10], [30, 10, 20], [30, 20, 10]]\n >>> sorted(permutations(\"ab\"))\n [['a', 'b'], ['b', 'a']]\n \"\"\"\n if not lst:\n yield []\n return\n try:\n for i in permutations(lst[1:]):\n for j in range(len(i)+1):\n alst = [0 for _ in range(len(i)+1)]\n alst[j] = lst[0]\n n = 0\n for k in range(len(i)+1):\n if k != j:\n alst[k] = i[n]\n n += 1\n yield alst\n except:\n yield lst[0]\n"
},
{
"alpha_fraction": 0.49281564354896545,
"alphanum_fraction": 0.5184958577156067,
"avg_line_length": 22.86131477355957,
"blob_id": "a522b8f0da6b8518d0530940719e528fcd248c58",
"content_id": "98d03dc577bdbfd30ac1606c29f3cb8ec2ded88c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3271,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 137,
"path": "/demo/18.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "\"\"\"Linked lists\"\"\"\n\nclass Link:\n \"\"\"A linked list.\n\n >>> s = Link(3, Link(4, Link(5)))\n >>> len(s)\n 3\n >>> s[2]\n 5\n >>> s\n Link(3, Link(4, Link(5)))\n >>> s.first = 6\n >>> s.second = 7\n >>> s.rest.rest = Link.empty\n >>> s\n Link(6, Link(7))\n \"\"\"\n empty = ()\n\n def __init__(self, first, rest=empty):\n assert rest is Link.empty or isinstance(rest, Link)\n self.first = first\n self.rest = rest\n\n def __getitem__(self, i):\n if i == 0:\n return self.first\n else:\n return self.rest[i-1]\n\n def __len__(self):\n return 1 + len(self.rest)\n\n def __repr__(self):\n if self.rest:\n rest_str = ', ' + repr(self.rest)\n else:\n rest_str = ''\n return 'Link({0}{1})'.format(self.first, rest_str)\n\n @property\n def second(self):\n return self.rest.first\n\n @second.setter\n def second(self, value):\n self.rest.first = value\n\ns = Link(3, Link(4, Link(5)))\nsquare = lambda x: x * x\nodd = lambda x: x % 2 == 1\n\ndef extend_link(s, t):\n \"\"\"Return a Link with the elements of s followed by those of t.\n\n >>> extend_link(s, s)\n Link(3, Link(4, Link(5, Link(3, Link(4, Link(5))))))\n >>> Link.__add__ = extend_link\n >>> s + s\n Link(3, Link(4, Link(5, Link(3, Link(4, Link(5))))))\n \"\"\"\n if s is Link.empty:\n return t\n else:\n return Link(s.first, extend_link(s.rest, t))\n\ndef map_link(f, s):\n \"\"\"Apply f to each element of s.\n\n >>> map_link(square, s)\n Link(9, Link(16, Link(25)))\n \"\"\"\n if s is Link.empty:\n return s\n else:\n return Link(f(s.first), map_link(f, s.rest))\n\ndef filter_link(f, s):\n \"\"\"Return a Link with elements of s for which f returns a true value.\n\n >>> map_link(square, filter_link(odd, s))\n Link(9, Link(25))\n \"\"\"\n if s is Link.empty:\n return s\n else:\n filtered = filter_link(f, s.rest)\n if f(s.first):\n return Link(s.first, filtered)\n else:\n return filtered\n\ndef join_link(s, separator):\n \"\"\"Return a string of all elements in s separated by separator.\n\n >>> join_link(s, \", \")\n 
'3, 4, 5'\n \"\"\"\n if s is Link.empty:\n return \"\"\n elif s.rest is Link.empty:\n return str(s.first)\n else:\n return str(s.first) + separator + join_link(s.rest, separator)\n\ndef partitions(n, m):\n \"\"\"Return a linked list of partitions of n using parts of up to m.\n Each partition is represented as a linked list.\n \"\"\"\n if n == 0:\n return Link(Link.empty) # A list containing the empty partition\n elif n < 0 or m == 0:\n return Link.empty\n else:\n using_m = partitions(n-m, m)\n with_m = map_link(lambda s: Link(m, s), using_m)\n without_m = partitions(n, m-1)\n return extend_link(with_m, without_m)\n\ndef print_partitions(n, m):\n \"\"\"Print the partitions of n using parts up to size m.\n\n >>> print_partitions(6, 4)\n 4 + 2\n 4 + 1 + 1\n 3 + 3\n 3 + 2 + 1\n 3 + 1 + 1 + 1\n 2 + 2 + 2\n 2 + 2 + 1 + 1\n 2 + 1 + 1 + 1 + 1\n 1 + 1 + 1 + 1 + 1 + 1\n \"\"\"\n links = partitions(n, m)\n lines = map_link(lambda s: join_link(s, \" + \"), links)\n map_link(print, lines)\n\n\n"
},
{
"alpha_fraction": 0.37352555990219116,
"alphanum_fraction": 0.39056357741355896,
"avg_line_length": 22.84375,
"blob_id": "c59c46b449262b72745ab0d812102b44533511c8",
"content_id": "a5d7d9eeec19d885ff26117147570387524795b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 763,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 32,
"path": "/lab/lab13/tests/tally.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "test = {\n 'name': 'tally',\n 'points': 1,\n 'suites': [\n {\n 'cases': [\n {\n 'code': r\"\"\"\n scm> (tally '(obama))\n ((obama . 1))\n scm> (tally '(taft taft taft))\n ((taft . 3))\n scm> (tally '(jerry jerry brown))\n ((jerry . 2) (brown . 1))\n scm> (tally '(jane jack jane jane jack jill jane jane))\n ((jane . 5) (jack . 2) (jill . 1))\n scm> (tally '(jane jack jane jane jill jane jane jack))\n ((jane . 5) (jack . 2) (jill . 1))\n \"\"\",\n 'hidden': False,\n 'locked': False\n }\n ],\n 'scored': True,\n 'setup': r\"\"\"\n scm> (load 'lab13_extra)\n \"\"\",\n 'teardown': '',\n 'type': 'scheme'\n }\n ]\n}\n"
},
{
"alpha_fraction": 0.7746478915214539,
"alphanum_fraction": 0.7746478915214539,
"avg_line_length": 34.5,
"blob_id": "1ed4ae14b383f6e59ecb4d06e390967f82d5b2de",
"content_id": "a3e453a1b0e8b9c6b27e2ad32baffb3999f0be21",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 71,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 2,
"path": "/lab/lab13/lab13_extra.sql",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "CREATE TABLE pairs AS\n SELECT \"REPLACE THIS LINE WITH YOUR SOLUTION\";\n"
},
{
"alpha_fraction": 0.44295042753219604,
"alphanum_fraction": 0.47944679856300354,
"avg_line_length": 21.44827651977539,
"blob_id": "75e920679a98df8268d74f5d2e1dd3086fddf697",
"content_id": "e04d2a0fbd2e2176f515d73f2e90f1734a54e73b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2603,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 116,
"path": "/lab/lab08/lab08_extra.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "class Keyboard:\n \"\"\"A Keyboard takes in an arbitrary amount of buttons, and has a\n dictionary of positions as keys, and values as Buttons.\n\n >>> b1 = Button(0, \"H\")\n >>> b2 = Button(1, \"I\")\n >>> k = Keyboard(b1, b2)\n >>> k.buttons[0].key\n 'H'\n >>> k.press(1)\n 'I'\n >>> k.typing([0, 1])\n 'HI'\n >>> k.typing([1, 0])\n 'IH'\n >>> b1.pressed\n 2\n >>> b2.pressed\n 3\n \"\"\"\n\n def __init__(self, *args):\n \"*** YOUR CODE HERE ***\"\n\n def press(self, info):\n \"\"\"Takes in a position of the button pressed, and\n returns that button's output\"\"\"\n \"*** YOUR CODE HERE ***\"\n\n def typing(self, typing_input):\n \"\"\"Takes in a list of positions of buttons pressed, and\n returns the total output\"\"\"\n \"*** YOUR CODE HERE ***\"\n\nclass Button:\n def __init__(self, pos, key):\n self.pos = pos\n self.key = key\n self.pressed = 0\n\ndef make_advanced_counter_maker():\n \"\"\"Makes a function that makes counters that understands the\n messages \"count\", \"global-count\", \"reset\", and \"global-reset\".\n See the examples below:\n\n >>> make_counter = make_advanced_counter_maker()\n >>> tom_counter = make_counter()\n >>> tom_counter('count')\n 1\n >>> tom_counter('count')\n 2\n >>> tom_counter('global-count')\n 1\n >>> jon_counter = make_counter()\n >>> jon_counter('global-count')\n 2\n >>> jon_counter('count')\n 1\n >>> jon_counter('reset')\n >>> jon_counter('count')\n 1\n >>> tom_counter('count')\n 3\n >>> jon_counter('global-count')\n 3\n >>> jon_counter('global-reset')\n >>> tom_counter('global-count')\n 1\n \"\"\"\n \"*** YOUR CODE HERE ***\"\n\ndef trade(first, second):\n \"\"\"Exchange the smallest prefixes of first and second that have equal sum.\n\n >>> a = [1, 1, 3, 2, 1, 1, 4]\n >>> b = [4, 3, 2, 7]\n >>> trade(a, b) # Trades 1+1+3+2=7 for 4+3=7\n 'Deal!'\n >>> a\n [4, 3, 1, 1, 4]\n >>> b\n [1, 1, 3, 2, 2, 7]\n >>> c = [3, 3, 2, 4, 1]\n >>> trade(b, c)\n 'No deal!'\n >>> b\n [1, 1, 3, 2, 2, 7]\n >>> c\n [3, 3, 2, 4, 1]\n >>> 
trade(a, c)\n 'Deal!'\n >>> a\n [3, 3, 2, 1, 4]\n >>> b\n [1, 1, 3, 2, 2, 7]\n >>> c\n [4, 3, 1, 4, 1]\n \"\"\"\n m, n = 1, 1\n\n \"*** YOUR CODE HERE ***\"\n\n if False: # change this line!\n first[:m], second[:n] = second[:n], first[:m]\n return 'Deal!'\n else:\n return 'No deal!'\n\ndef zap(n):\n i, count = 1, 0\n while i <= n:\n while i <= 5 * n:\n count += i\n print(i / 6)\n i *= 3\n return count"
},
{
"alpha_fraction": 0.473204106092453,
"alphanum_fraction": 0.4946408271789551,
"avg_line_length": 24.05714225769043,
"blob_id": "ae98529513b01f057e04ce3f0e08459a4db31873",
"content_id": "f0bcdb9aa98e3f03e3d254fc962d6c674d727a7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4385,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 175,
"path": "/lab/lab07/lab07.py",
"repo_name": "gabywang/cs61a",
"src_encoding": "UTF-8",
"text": "## Recursive Objects ##\n\n# Q2\ndef list_to_link(lst):\n \"\"\"Takes a Python list and returns a Link with the same elements.\n\n >>> link = list_to_link([1, 2, 3])\n >>> print(link)\n <1 2 3>\n \"\"\"\n if not lst:\n return Link.empty\n return Link(lst[0], list_to_link(lst[1:]))\n\n\n# Q3\ndef link_to_list(link):\n \"\"\"Takes a Link and returns a Python list with the same elements.\n\n >>> link = Link(1, Link(2, Link(3, Link(4))))\n >>> link_to_list(link)\n [1, 2, 3, 4]\n >>> link_to_list(Link.empty)\n []\n \"\"\"\n lst = []\n while link is not Link.empty:\n lst.append(link.first)\n link = link.rest\n return lst\n\n# Q4\ndef remove_all(link , value):\n \"\"\"Remove all the nodes containing value. Assume there exists some\n nodes to be removed and the first element is never removed.\n\n >>> l1 = Link(0, Link(2, Link(2, Link(3, Link(1, Link(2, Link(3)))))))\n >>> print(l1)\n <0 2 2 3 1 2 3>\n >>> remove_all(l1, 2)\n >>> print(l1)\n <0 3 1 3>\n >>> remove_all(l1, 3)\n >>> print(l1)\n <0 1>\n \"\"\"\n while link is not Link.empty and link.rest is not Link.empty:\n if link.rest.first == value:\n link.rest = link.rest.rest\n else:\n link = link.rest\n\n\n# Linked List Class\nclass Link:\n \"\"\"A linked list.\n\n >>> s = Link(1, Link(2, Link(3)))\n >>> s.first\n 1\n >>> s.rest\n Link(2, Link(3))\n \"\"\"\n empty = ()\n\n def __init__(self, first, rest=empty):\n assert rest is Link.empty or isinstance(rest, Link)\n self.first = first\n self.rest = rest\n\n def __repr__(self):\n if self.rest is Link.empty:\n return 'Link({})'.format(self.first)\n else:\n return 'Link({}, {})'.format(self.first, repr(self.rest))\n\n def __str__(self):\n \"\"\"Returns a human-readable string representation of the Link\n\n >>> s = Link(1, Link(2, Link(3, Link(4))))\n >>> str(s)\n '<1 2 3 4>'\n >>> str(Link(1))\n '<1>'\n >>> str(Link.empty) # empty tuple\n '()'\n \"\"\"\n string = '<'\n while self.rest is not Link.empty:\n string += str(self.first) + ' '\n self = self.rest\n return 
string + str(self.first) + '>'\n\n def __len__(self):\n \"\"\" Return the number of items in the linked list.\n\n >>> s = Link(1, Link(2, Link(3)))\n >>> len(s)\n 3\n >>> s = Link.empty\n >>> len(s)\n 0\n \"\"\"\n return 1 + len(self.rest)\n\n def __getitem__(self, i):\n \"\"\"Returning the element found at index i.\n\n >>> s = Link(1, Link(2, Link(3)))\n >>> s[1]\n 2\n >>> s[2]\n 3\n \"\"\"\n if i == 0:\n return self.first\n else:\n return self.rest[i-1]\n\n def __setitem__(self, index, element):\n \"\"\"Sets the value at the given index to the element\n\n >>> s = Link(1, Link(2, Link(3)))\n >>> s[1] = 5\n >>> s\n Link(1, Link(5, Link(3)))\n >>> s[4] = 5\n Traceback (most recent call last):\n ...\n IndexError\n \"\"\"\n if index == 0:\n self.first = element\n elif self.rest is Link.empty:\n raise IndexError\n else:\n self.rest[index - 1] = element\n\n def __contains__(self, e):\n return self.first == e or e in self.rest\n\n def map(self, f):\n self.first = f(self.first)\n if self.rest is not Link.empty:\n self.rest.map(f)\n\n# Tree Class\nclass Tree:\n def __init__(self, label, branches=[]):\n for c in branches:\n assert isinstance(c, Tree)\n self.label = label\n self.branches = list(branches)\n\n def __repr__(self):\n if self.branches:\n branches_str = ', ' + repr(self.branches)\n else:\n branches_str = ''\n return 'Tree({0}{1})'.format(self.label, branches_str)\n\n def is_leaf(self):\n return not self.branches\n\n def __eq__(self, other):\n return type(other) is type(self) and self.root == other.label \\\n and self.branches == other.branches\n\n def __str__(self):\n def print_tree(t, indent=0):\n tree_str = ' ' * indent + str(t.label)\n for b in t.branches:\n tree_str += print_tree(b, indent + 1)\n return tree_str\n return print_tree(self)\n"
}
] | 15 |
HiteshSG/TestGithubAcc | https://github.com/HiteshSG/TestGithubAcc | b5777b6e444d90550e258d5502d19ae02d848de8 | 2e7aae28f7f1c6241fb1319beed84a01db4a1c02 | d41bdcb4ad778885e772ded035cff2aeae9e686a | refs/heads/master | 2020-04-11T12:45:43.845885 | 2018-12-14T14:00:55 | 2018-12-14T14:00:55 | 161,791,139 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7272727489471436,
"alphanum_fraction": 0.7272727489471436,
"avg_line_length": 21,
"blob_id": "e0cadaeb161b9a4f29d97d29444ac9da53d78724",
"content_id": "5740bfbecc494ea81b6b465c395f2b964c89962f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 22,
"license_type": "no_license",
"max_line_length": 21,
"num_lines": 1,
"path": "/app1.py",
"repo_name": "HiteshSG/TestGithubAcc",
"src_encoding": "UTF-8",
"text": "print('Second print')\n"
},
{
"alpha_fraction": 0.7387387156486511,
"alphanum_fraction": 0.7387387156486511,
"avg_line_length": 17.5,
"blob_id": "7f11b8d13d033f6872d9c4b16e33cdcd893b57c0",
"content_id": "e9cdba9047c043aabf643705e7a066668315cee5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 111,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 6,
"path": "/README.md",
"repo_name": "HiteshSG/TestGithubAcc",
"src_encoding": "UTF-8",
"text": "#This is test test git repository\n\nTest repository\n\n##Apps contains:\n__This repository contains **two** apps__\n"
}
] | 2 |
refi93/sms-server | https://github.com/refi93/sms-server | 60c0c5d81d327c78f124b41a9feaf91cb791607d | 3bebbfb1cd9dbd2cccfa01bab11f5231bdb0e6b2 | dd1811b76b423b1773ec2448238d724aab1a337b | refs/heads/master | 2022-12-17T05:07:21.780001 | 2018-04-28T22:54:40 | 2018-04-28T22:54:40 | 127,678,470 | 2 | 0 | null | 2018-04-01T23:04:30 | 2021-02-19T10:26:16 | 2022-12-08T02:03:51 | Python | [
{
"alpha_fraction": 0.7331240177154541,
"alphanum_fraction": 0.7362637519836426,
"avg_line_length": 15.789473533630371,
"blob_id": "f86092d9d85ac47d801ddc1044f83df4f2759cb4",
"content_id": "ba7f60ae1d9fa09f0faf121920862fc6dba35584",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 637,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 38,
"path": "/db.py",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "import sqlite3\nimport atexit\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.orm import sessionmaker\n\n\nconn = sqlite3.connect('database.db')\nengine = create_engine('sqlite:///database.db')\nSession = sessionmaker(bind=engine)\nsession = Session()\n\n\ndef execute_raw(raw_sql):\n return conn.executescript(raw_sql)\n\n\ndef select(table):\n return sqlalchemy.select([table])\n\n\ndef query(table):\n return session.query(table)\n\n\ndef insert(table):\n return sqlalchemy.insert(table)\n\n\ndef update(table):\n return sqlalchemy.update(table)\n\n\ndef close_database():\n conn.close()\n engine.dispose()\n\n\natexit.register(close_database)"
},
{
"alpha_fraction": 0.6085972785949707,
"alphanum_fraction": 0.6085972785949707,
"avg_line_length": 19.136363983154297,
"blob_id": "74d8e34163f90580dc8716e63ee7b0e01d3870c9",
"content_id": "cc95ae86478542eaa53e39fbae86d0f9113d13ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 442,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 22,
"path": "/sms_apps/sms_app.py",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "from datetime import datetime\n\nimport db\nfrom models import MessageToSend\n\nclass SmsApp:\n def should_handle(self, sms):\n pass\n\n def get_response(self, sms):\n \tpass\n\n def handle(self, sms):\n response = self.get_response(sms)\n \n db.session.add(MessageToSend(\n \tphone_to=sms.phone_from,\n \tmsg_body=response\n ))\n sms.processed_at = datetime.utcnow()\n\n db.session.commit()"
},
{
"alpha_fraction": 0.4378698170185089,
"alphanum_fraction": 0.6982248425483704,
"avg_line_length": 14.363636016845703,
"blob_id": "ada4b87dd827ff165cfa8d6f3535c9e09fdc0e36",
"content_id": "c40f8fa0648bf97df2f5287912b9383d6e688984",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 169,
"license_type": "no_license",
"max_line_length": 18,
"num_lines": 11,
"path": "/requirements.txt",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "certifi==2018.4.16\nchardet==3.0.4\nfuture==0.16.0\nidna==2.6\niso8601==0.1.12\npyserial==3.4\nPyYAML==3.12\nrequests==2.18.4\nSQLAlchemy==1.2.7\nUnidecode==1.0.22\nurllib3==1.22\n"
},
{
"alpha_fraction": 0.6147672533988953,
"alphanum_fraction": 0.617977499961853,
"avg_line_length": 17.176469802856445,
"blob_id": "c267455cf08ba898ce28d55227eed467b1f30cf3",
"content_id": "6ff6b952863d6b9bc713ba270f659403f2287ffb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 623,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 34,
"path": "/message_handler.py",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\nfrom time import sleep\n\nimport config\nimport db\nfrom models import ReceivedMessage\nfrom sms_apps.navigator import Navigator\n\n\nsms_apps = [\n Navigator()\n]\n\n\ndef handle(message):\n for sms_app in sms_apps:\n if (\n (message.phone_from in config.senders_whitelist)\n and sms_app.should_handle(message)\n ):\n sms_app.handle(message)\n\n\nwhile True:\n new_messages = (\n db.query(ReceivedMessage)\n .filter(ReceivedMessage.processed_at.is_(None))\n .all()\n )\n\n for message in new_messages:\n handle(message)\n\n sleep(5)\n \n"
},
{
"alpha_fraction": 0.7517182230949402,
"alphanum_fraction": 0.7603092789649963,
"avg_line_length": 51.95454406738281,
"blob_id": "4744baa7d5432f455c52bc095702c7b16bdb3203",
"content_id": "bd27b73312bcebd2a0e42f47200a8aded052d183",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1164,
"license_type": "no_license",
"max_line_length": 249,
"num_lines": 22,
"path": "/README.md",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "# python sms-server\nA simple SMS server made in Python. It's a server where you send SMS messages as requests, those are handled by the server's handlers and the server sends the corresponding response as an SMS. You can make easily your own handlers and register them.\nTo run properly it needs a GSM module (in my case Ai Thinker A6) connected to the serial port (hardcoded /dev/ttyUSB0).\nAll it currently does is to listen for incomming SMS messages in an eternal loop, process them, send the response and clear the SMS memory of the GSM module.\n\n## Why?\nI have a Nokia 3310 and I want to get the best user experience out of it!\n\n## Available SMS handlers (SMS \"apps\"):\n\n### Navigation \nyou send an SMS message \n\t```navigate \"addressA\" \"addresB\"```\nto the phone number of your GSM module and you get a response containing the distance and compass bearing from addresA to addressB\n\n## Running the project:\n\n1. copy config.py.example to config.py, add your phone number to the whitelist in it and set the pin of the SIM card in the GSM module\n2. install dependencies\n\t```pip install -r requirements.txt```\n3. run the project by\n\t```sudo python3 -m sms_server.py```"
},
{
"alpha_fraction": 0.5620567202568054,
"alphanum_fraction": 0.5620567202568054,
"avg_line_length": 24.68181800842285,
"blob_id": "4300824eef3453d955048192591d2d3fad40e85b",
"content_id": "c52b9072a62038e988e29801823b6a5a5727a94f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 564,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 22,
"path": "/db_init.py",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "import db\n\ndb.execute_raw(\"\"\"\n DROP TABLE IF EXISTS received_messages;\n DROP TABLE IF EXISTS messages_to_send;\n\n CREATE TABLE received_messages(\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n phone_from TEXT,\n msg_body TEXT,\n created_at TIMESTAMP,\n processed_at TIMESTAMP\n );\n\n CREATE TABLE messages_to_send(\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n phone_to TEXT,\n msg_body TEXT,\n created_at TIMESTAMP,\n sent_at TIMESTAMP\n );\n\"\"\")"
},
{
"alpha_fraction": 0.713409960269928,
"alphanum_fraction": 0.713409960269928,
"avg_line_length": 32.487178802490234,
"blob_id": "2f523197e55b3d90b5dad35ed5c89fcaa79aae55",
"content_id": "786dccf30dc8545af64dc221d78c378fbcab2dda",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1305,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 39,
"path": "/models.py",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "from sqlalchemy import Column, DateTime, String, Integer, JSON, ForeignKey, Boolean, func\nfrom sqlalchemy.orm import relationship, backref\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import class_mapper\nfrom sqlalchemy.orm.properties import ColumnProperty\n\nimport db\nfrom datetime import datetime\n\n\nBase = declarative_base()\nBase.metadata.create_all(db.engine)\n\n\nclass BaseMixin(object):\n def as_dict(self):\n result = {}\n for prop in class_mapper(self.__class__).iterate_properties:\n if isinstance(prop, ColumnProperty):\n result[prop.key] = getattr(self, prop.key)\n return result\n\n\nclass ReceivedMessage(BaseMixin, Base):\n __tablename__ = 'received_messages'\n id = Column(Integer, primary_key=True)\n phone_from = Column(String, default=None) \n msg_body = Column(String, default=None)\n created_at = Column(DateTime, default=datetime.utcnow())\n processed_at = Column(DateTime, default=None)\n\n\nclass MessageToSend(BaseMixin, Base):\n __tablename__ = 'messages_to_send'\n id = Column(Integer, primary_key=True)\n phone_to = Column(String, default=None) \n msg_body = Column(String, default=None)\n created_at = Column(DateTime, default=datetime.utcnow())\n sent_at = Column(DateTime, default=None)"
},
{
"alpha_fraction": 0.615363597869873,
"alphanum_fraction": 0.6407873034477234,
"avg_line_length": 33.18691635131836,
"blob_id": "eefad6355202c6a3895fb191359a5e0dddad6cd7",
"content_id": "fad47e416a04364e7d26239d3ce52acb395fd562",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3665,
"license_type": "no_license",
"max_line_length": 230,
"num_lines": 107,
"path": "/sms_apps/navigator.py",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "import requests\nfrom math import radians, cos, sin, asin, sqrt\nimport math\nfrom unidecode import unidecode\n\nimport config\nfrom .sms_app import SmsApp\nimport gsm_module\n\n\ngeocode_api_url = \"https://maps.google.com/maps/api/geocode/json\"\n\n\nclass Navigator(SmsApp):\n def should_handle(self, sms):\n return sms.msg_body.lower().startswith(\"navigate\")\n\n def get_response(self, sms):\n sms_text_splitted = sms.msg_body.split(\"\\\"\")\n origin = sms_text_splitted[1]\n destination = sms_text_splitted[3]\n\n origin_info = self.get_address_info(origin)\n destination_info = self.get_address_info(destination)\n\n origin_coord = (origin_info[\"geometry\"][\"location\"][\"lat\"], origin_info[\"geometry\"][\"location\"][\"lng\"])\n destination_coord = (destination_info[\"geometry\"][\"location\"][\"lat\"], destination_info[\"geometry\"][\"location\"][\"lng\"])\n\n distance = int(haversine(origin_coord, destination_coord))\n compass_bearing = int(get_compass_bearing(origin_coord, destination_coord))\n\n return \"Dist: \" + distance_to_str(distance) + \"\\r\\nHead: \" + str(compass_bearing) + \" deg\\r\\nOrig: \" + unidecode(origin_info[\"formatted_address\"][:42]) + \"\\r\\nDest: \" + unidecode(destination_info[\"formatted_address\"][:42])\n\n def get_address_info(self, address):\n response = requests.get(geocode_api_url, {\n \"address\": address,\n \"key\": config.geocode_api_key,\n }).json()\n \n return response[\"results\"][0]\n\n\ndef distance_to_str(distance_in_m):\n if distance_in_m > 1000:\n return str(round(distance_in_m / 1000, 1)) + \" km\"\n\n else:\n return str(distance_in_m) + \" m\"\n\n\ndef haversine(point_1, point_2):\n \"\"\"\n Calculate the great circle distance between two points\n on the earth (specified in decimal degrees)\n \"\"\"\n lat1, lon1 = point_1\n lat2, lon2 = point_2\n\n # convert decimal degrees to radians\n lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])\n # haversine formula\n dlon = lon2 - lon1\n dlat = 
lat2 - lat1\n a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2\n c = 2 * asin(sqrt(a))\n # Radius of earth in kilometers is 6371\n m = 6371000 * c\n return m\n\n\ndef get_compass_bearing(point_1, point_2):\n \"\"\"\n Calculates the bearing between two points.\n The formulae used is the following:\n θ = atan2(sin(Δlong).cos(lat2),\n cos(lat1).sin(lat2) − sin(lat1).cos(lat2).cos(Δlong))\n :Parameters:\n - `pointA: The tuple representing the latitude/longitude for the\n first point. Latitude and longitude must be in decimal degrees\n - `pointB: The tuple representing the latitude/longitude for the\n second point. Latitude and longitude must be in decimal degrees\n :Returns:\n The bearing in degrees\n :Returns Type:\n float\n \"\"\"\n if (type(point_1) != tuple) or (type(point_2) != tuple):\n raise TypeError(\"Only tuples are supported as arguments\")\n\n lat1 = math.radians(point_1[0])\n lat2 = math.radians(point_2[0])\n\n diffLong = math.radians(point_2[1] - point_1[1])\n\n x = math.sin(diffLong) * math.cos(lat2)\n y = math.cos(lat1) * math.sin(lat2) - (math.sin(lat1)\n * math.cos(lat2) * math.cos(diffLong))\n\n initial_bearing = math.atan2(x, y)\n\n # Now we have the initial bearing but math.atan2 return values\n # from -180° to + 180° which is not what we want for a compass bearing\n # The solution is to normalize the initial bearing as shown below\n initial_bearing = math.degrees(initial_bearing)\n compass_bearing = (initial_bearing + 360) % 360\n\n return compass_bearing\n"
},
{
"alpha_fraction": 0.5788035988807678,
"alphanum_fraction": 0.5889101624488831,
"avg_line_length": 25.149999618530273,
"blob_id": "75b8ca73adee31b2704579e1dea6ecb012a908ef",
"content_id": "0316ee845995df25072d2de9b9dbd12ea3747b93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3661,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 140,
"path": "/gsm_module.py",
"repo_name": "refi93/sms-server",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport serial\nfrom datetime import datetime\n\nimport config\nimport db\nfrom models import MessageToSend, ReceivedMessage\n\nport = serial.Serial(config.serial_port, 9600, timeout=4)\nif (not port.isOpen()):\n port.open()\n\n\ndef send_at_command(command, append_eol=True):\n command = command + \"\\r\\n\" if append_eol else command\n port.write(command.encode())\n return list(map(lambda elem: elem.decode(\"utf-8\"), port.readlines()))\n\n\ndef init(pin=None):\n while True:\n result = send_at_command(\"ATI\")\n if len(result) > 0 and result[-1] == \"OK\\r\\n\":\n break\n\n if (not enter_pin(pin)):\n raise Error(\"PIN authentification has failed!\")\n\n # switch to text mode so commands look nicer\n send_at_command(\"AT+CMGF=1\")\n\n # store received sms on sim card\n # i.e. disable cnmi notifications and set storage\n # of newly arrived messages to gsm module memory\n send_at_command(\"AT+CNMI=0,0,0,0,0\")\n send_at_command(\"AT+CPMS=\\\"ME\\\",\\\"ME\\\",\\\"ME\\\"\")\n\n print(\"GSM module initialized!\")\n\n\ndef enter_pin(pin=None):\n pin_status = send_at_command(\"AT+CPIN?\")[2]\n\n if pin_status == \"+CPIN:READY\\r\\n\":\n return True\n elif pin_status == \"+CPIN:SIM PIN\\r\\n\":\n auth_result = send_at_command(\"AT+CPIN=\\\"\" + pin + \"\\\"\")\n return auth_result[2] == \"OK\\r\\n\"\n else:\n return False\n\n\ndef send_sms_message(phone_number, text):\n assert phone_number.startswith(\"+421\")\n\n command_sequence = [\n \"AT+CMGF=1\",\n \"AT+CMGS=\" + phone_number,\n text\n ]\n\n for command in command_sequence:\n send_at_command(command)\n\n result = send_at_command(chr(26), False)\n print(result)\n\n\ndef get_sms_messages(category=\"ALL\"):\n assert category in [\n \"ALL\", \"REC READ\", \"REC UNREAD\", \"STO UNSENT\", \"STO SENT\"\n ]\n\n result = []\n response_raw = send_at_command(\"AT+CMGL=\" + category)\n\n print(response_raw)\n\n sms_list_raw = response_raw[2:-2]\n # the odd elements are sms metadata, the even 
ones are sms texts\n sms_pairs = zip(sms_list_raw[0::2], sms_list_raw[1::2])\n\n for sms_meta, sms_text in sms_pairs:\n result.append(parse_sms(sms_meta, sms_text))\n\n print(result)\n\n return result\n\n\ndef delete_all_sms_messages():\n sms_messages_to_delete = get_sms_messages(\"ALL\")\n\n for sms_message in sms_messages_to_delete:\n delete_sms_message(sms_message[\"index\"])\n\n\ndef delete_sms_message(index):\n return send_at_command(\"AT+CMGD=\" + str(index))\n\n\ndef parse_sms(sms_meta, sms_text):\n sms_meta = sms_meta.split(',')\n\n return {\n 'index': int(sms_meta[0].split(': ')[1]),\n 'category': sms_meta[1].split(\"\\\"\")[1],\n 'sender': sms_meta[2].split(\"\\\"\")[1],\n 'date': sms_meta[4].split(\"\\\"\")[1],\n 'text': sms_text\n }\n\nif __name__ == '__main__':\n init(pin=config.sim_card_pin)\n\n while (True):\n # check received messages\n for sms_message in get_sms_messages():\n print('new message arrived')\n msg = ReceivedMessage(\n phone_from=sms_message['sender'],\n msg_body=sms_message['text'],\n )\n\n db.session.add(msg)\n db.session.commit()\n delete_sms_message(sms_message[\"index\"])\n\n # send messages waiting to be sent\n messages_to_send = (\n db.query(MessageToSend)\n .filter(MessageToSend.sent_at.is_(None))\n .all()\n )\n for message in messages_to_send:\n send_sms_message(message.phone_to, message.msg_body)\n \n message.sent_at = datetime.utcnow()\n db.session.commit()\n"
}
] | 9 |
jJayyyyyyy/network | https://github.com/jJayyyyyyy/network | 284a2845c4431e802a48ea331135a3e2035663cd | 86794dd9d828fe66b7ada28233fbbd5c66ecc50d | 79df1e2fde419883fba5f3a79eddfd6c3b7875cc | refs/heads/master | 2022-03-01T23:51:10.619772 | 2019-08-11T12:27:35 | 2019-08-11T12:27:35 | 116,240,231 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7460317611694336,
"alphanum_fraction": 0.7460317611694336,
"avg_line_length": 20,
"blob_id": "aec516d763d7ba81e430cd40591961552b89143c",
"content_id": "d9db0392b2ac8b724f08926267b427bf4b1feea3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 126,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 6,
"path": "/udacity/cs253/Integration/_01-04_/index.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\n\nclass IndexHandler(Page):\n\tfilename = 'index.html'\n\tdef get(self):\n\t\treturn self.render(self.filename)\n"
},
{
"alpha_fraction": 0.6314750909805298,
"alphanum_fraction": 0.6396151781082153,
"avg_line_length": 26.958620071411133,
"blob_id": "d912f190d04955cf0274cf690a258cd23fba82bb",
"content_id": "663b6c3e6d9bd663bc1b85936dbbae0ce916290a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4054,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 145,
"path": "/udacity/cs253/ProblemSet03/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from flask import Flask, request, redirect, url_for, g\nimport sqlite3, datetime\nimport page\napp = Flask(__name__)\n\n#################################################################################\nDATABASE = 'post.db'\n\ndef get_conn():\n\tif not hasattr(g, 'sqlite3_conn'):\n\t\tconn = sqlite3.connect(DATABASE)\n\t\tconn.row_factory = sqlite3.Row\n\t\tg.sqlite3_conn = conn\n\treturn g.sqlite3_conn\n\ndef init_db():\n\twith app.app_context():\n\t\tcur = get_conn().cursor()\n\t\twith app.open_resource('schema.sql', mode='r') as f:\n\t\t\tcur.executescript(f.read())\n\t\tcur.close()\n\[email protected]('initdb')\ndef initdb_command():\n\tinit_db()\n\tprint('database initialized')\n\[email protected]_appcontext\ndef close_conn(error):\n\tif hasattr(g, 'sqlite3_conn'):\n\t\tconn = g.sqlite3_conn\n\t\tconn.cursor().close()\n\t\tconn.commit()\n\t\tconn.close()\n\t\tprint('db connection closed')\n\ndef query_db(query, args=()):\n\tcur = get_conn().cursor()\n\tcur.execute(query, args)\n\treturn cur\n\n# @args(update_date, post_subject, post_content)\ndef insert_record(args):\n\tif args and isinstance(args, tuple) and len(args) == 3:\n\t\tquery = 'insert into entries (update_date, post_subject, post_content) values (?, ?, ?)'\n\t\tquery_db(query, args=args)\n\t\treturn True\n\telse:\n\t\treturn False\n\n# @args(update_date, post_subject, post_content, post_id)\ndef update_record(args):\n\tif args and isinstance(args, tuple) and len(args) == 4:\n\t\tquery = 'update entries set update_date = ?, post_subject = ?, post_content = ? 
where post_id = ?'\n\t\tquery_db(query, args)\n\t\treturn True\n\telse:\n\t\treturn False\n\ndef get_record(post_id):\n\tif post_id and isinstance(post_id, int):\n\t\tquery = 'select * from entries where post_id = %d' % post_id\n\t\tcur = query_db(query)\n\t\trecord_list = cur.fetchall()\n\t\tif record_list:\n\t\t\treturn record_list[0]\n\t\telse:\n\t\t\treturn None\n\ndef get_last_post_id():\n\tquery = 'select post_id from entries where post_id = (select max(post_id) from entries)'\n\t# query = 'select * from entries order by post_id desc limit 1'\n\tcur = query_db(query)\n\trecord_list = cur.fetchall()\n\tif record_list:\n\t\trecord = record_list[0]\n\t\tpost_id = record['post_id']\n\t\treturn post_id\n\telse:\n\t\treturn None\n\ndef get_record_list(limit=10):\n\tquery = 'select * from entries order by post_id desc limit %d' % limit\n\tcur = query_db(query)\n\trecord_list = cur.fetchall()\n\treturn record_list\n\n#################################################################################\n\n\n\n\[email protected]('/blog', methods=['GET'])\ndef get_blog():\n\trecord_list = get_record_list()\n\treturn page.render_blog(record_list=record_list)\n\[email protected]('/blog/<int:post_id>', methods=['GET'])\ndef get_post(post_id):\n\trecord = get_record(post_id)\n\treturn page.render_post(record)\n\[email protected]('/blog/edit/<int:post_id>', methods=['GET'])\ndef get_edit_post(post_id):\n\trecord = get_record(post_id)\n\treturn page.render_edit_post(**record)\n\[email protected]('/blog/edit/<int:post_id>', methods=['POST'])\ndef post_edit_post(post_id):\n\tpost_subject = request.form.get('post_subject')\n\tpost_content = request.form.get('post_content')\n\n\tif post_subject and post_content:\n\t\tupdate_date = datetime.date.today()\n\t\targs = (update_date, post_subject, post_content, str(post_id))\n\t\tupdate_record(args)\n\treturn redirect(url_for('get_post', post_id=post_id))\n\[email protected]('/blog/new_post', methods=['GET'])\ndef get_new_post():\n\treturn 
page.render_new_post()\n\[email protected]('/blog/new_post', methods=['POST'])\ndef post_new_post():\n\tpost_subject = request.form.get('post_subject')\n\tpost_content = request.form.get('post_content')\n\t\n\tif post_subject and post_content:\n\t\tupdate_date = datetime.date(2018, 1, 1)\n\t\targs = (update_date, post_subject, post_content)\n\t\tinsert_record(args)\t\n\t\tpost_id = get_last_post_id()\n\t\treturn redirect(url_for('get_post', post_id=post_id))\n\t\n\tkw = {\t'error': 'invalid subject or content',\n\t\t\t'post_subject': post_subject,\n\t\t\t'post_content': post_content\n\t\t}\n\treturn page.render_new_post(**kw)\n\n#################################################################################\n\nif __name__ == '__main__':\n\tapp.run(port=8000, debug=True)\n\t# app.run(port=8080, host='0.0.0.0')\n"
},
{
"alpha_fraction": 0.6021341681480408,
"alphanum_fraction": 0.6097561120986938,
"avg_line_length": 20.899999618530273,
"blob_id": "04b228ac42dfa567b464c04622685bbd7a632923",
"content_id": "54e51e855ade4bafade99b73a34934344329b943",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 656,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 30,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/static/welcome/welcome.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "var app = new Vue({\n\tel: '#welcome',\n\tdata: {\n\t\tpathname: '',\n\t\tredirectPath: '',\n\t\tuserList: []\n\t},\n\tcreated: function(){\n\t\tthis.pathname = document.location.pathname;\n\t\tthis.redirectPath = document.location.search.split('=')[1]\n\t\tthis.getUsername();\n\t},\n\tmethods: {\n\t\tgetUsername: function(){\n\t\t\tvar APIjson = `${this.pathname}?q=json`;\n\t\t\tvar self = this;\n\t\t\taxios.get(APIjson)\n\t\t\t\t.then(function(resp){\n\t\t\t\t\tself.userList = resp.data;\n\t\t\t\t\tself.redirect();\n\t\t\t\t})\n\t\t},\n\t\tredirect: function(){\n\t\t\tvar self = this;\n\t\t\tif( self.redirectPath ){\n\t\t\t\twindow.setTimeout( function(){ window.location.replace( `${self.redirectPath}` ); }, 2000 );\n\t\t\t}\n\t\t}\n\t}\n})"
},
{
"alpha_fraction": 0.5679956078529358,
"alphanum_fraction": 0.5865647196769714,
"avg_line_length": 29.516666412353516,
"blob_id": "3fa4e6c5b037a39293c858a1f0ed7c111e474bf5",
"content_id": "7160dc8de2f0e3ac89e13f96b2791cbcc2e828f3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3662,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 120,
"path": "/vuejs/demo/nba_teams/assets/dl.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import requests, json, time\nfrom io import StringIO\n\nsess = requests.Session()\nheaders = {\n 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',\n 'Accept-Encoding': 'gzip, deflate',\n 'Accept-Language': 'zh-CN,zh;q=0.9',\n 'Host': 'nba.stats.qq.com',\n 'Cache-Control': 'no-cache',\n 'Pragma': 'no-cache',\n 'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36'\n}\n\ndef get_logos():\n headers['Host'] = 'mat1.gtimg.com'\n url = 'http://mat1.gtimg.com/sports/nba/logo/1602/%d.png'\n for i in range(1, 31):\n try:\n resp = sess.get(url % i, headers=headers)\n with open('%d.png' % i, 'wb') as f:\n f.write(resp.content)\n except:\n pass\n \ndef get_teams_raw():\n headers['Host'] = 'matchweb.sports.qq.com'\n url = 'http://matchweb.sports.qq.com/rank/team'\n params = {'competitionId': '100000', 'from': 'NBA_PC'}\n try:\n resp = sess.get(url, headers=headers, params=params).text\n resp = resp.lstrip('[0,').rstrip(',\"\"]')\n with open('teams_raw.json', 'w') as f:\n f.write(resp)\n except:\n pass\n\n\ndef get_stats():\n headers['Host'] = 'ziliaoku.sports.qq.com'\n url = 'http://ziliaoku.sports.qq.com/cube/index'\n params = { 'cubeId': '12',\n 'dimId': '3,4,12,13',\n 'from': 'sportsdatabase'\n }\n all_stats = {}\n for id in range(1, 31):\n all_stats[str(id)] = {}\n params['params'] = 't1:%d' % id\n try:\n resp = sess.get(url, headers=headers, params=params).text\n data = json.loads(resp)['data']\n all_stats[str(id)]['statRank'] = data['regStatRank']\n all_stats[str(id)]['statAssociation'] = data['nbaTeamUnionRegSeasonStat']\n all_stats[str(id)]['statTeam'] = data['nbaTeamRegSeasonStat']\n except Exception as e:\n print(e)\n time.sleep(2)\n\n all_stats = json.dumps(all_stats, ensure_ascii=False)\n with open('stats.json', 'w') as f:\n f.write(all_stats)\n\ndef filter_teams(team_list):\n\tnew_team_list = []\n\tfor i in range(5):\n\t\tteam = 
team_list[i]\n\t\tnew_team = {}\n\t\tnew_team['name'] = team['name']\n\t\tnew_team['teamId'] = team['teamId']\n\t\tnew_team_list.append(new_team)\n\treturn new_team_list\n\ndef get_teams():\n\twith open('teams_raw.json', 'r') as f:\n\t\tdata = json.loads(f.read()) # type(data) ==> dict\n\tdata['eastsouth'][2]['teamId'] = '30'\n\n\tnew_data = {\n\t\t'east': {\n\t\t\t'atlantic': filter_teams(data['atlantic']),\n\t\t\t'central': filter_teams(data['central']),\n\t\t\t'eastsouth': filter_teams(data['eastsouth'])\n\t\t},\n\t\t'west': {\n\t\t\t'pacific': filter_teams(data['pacific']),\n\t\t\t'westnorth': filter_teams(data['westnorth']),\n\t\t\t'westsouth': filter_teams(data['westsouth'])\n\t\t}\n\t}\n\n\tnew_data = json.dumps(new_data, ensure_ascii=False) # in case javascript can't read 'key' in str_json\n\twith open('teams.json', 'w') as f:\n\t\tf.write(new_data)\n\ndef update_team_stats():\n\twith open('stats.json', 'r') as f:\n\t\tstats = json.loads(f.read())\n\n\twith open('teams.json', 'r') as f:\n\t\tdata = json.loads(f.read())\n\n\tfor conf in data:\t\n\t\tfor region in data[conf]:\n\t\t\tfor i in range(5):\n\t\t\t\tteam = data[conf][region][i]\n\t\t\t\tteamId = team['teamId']\n\t\t\t\tstat = stats[teamId]\n\t\t\t\tteam['statRank'] = stat['statRank']\n\t\t\t\tteam['statTeam'] = stat['statTeam']\n\t\t\t\tteam['statAssociation'] = stat['statAssociation']\n\t\t\t\tdata[conf][region][i] = team\n\n\tdata = json.dumps(data, ensure_ascii=False)\n\twith open('teams.json', 'w') as f:\n\t\tf.write(data)\n\n# TODO crontab\n# get_stats()\n# update_team_stats()\n"
},
{
"alpha_fraction": 0.7771428823471069,
"alphanum_fraction": 0.7771428823471069,
"avg_line_length": 24.14285659790039,
"blob_id": "26c05d6c08105f1abceb9fdba51442f1d589238b",
"content_id": "b5c1d72cb05ff59955c46ab50f1722dba7020bbc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 175,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 7,
"path": "/udacity/cs253/ProblemSet03/schema.sql",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "drop table if exists entries;\ncreate table entries (\n\tpost_id integer primary key autoincrement,\n\tupdate_date date,\n\tpost_subject text not null,\n\tpost_content text not null\n);"
},
{
"alpha_fraction": 0.7144549489021301,
"alphanum_fraction": 0.7452606558799744,
"avg_line_length": 26.225807189941406,
"blob_id": "571dcf4d6be088d8ca1a8753d386dd6465cc02fe",
"content_id": "f7128cf7759bc080ff932e862099f47538251aef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1304,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 31,
"path": "/vuejs/The_Vue_Instance/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "这一节里面提到了hook,钩子。\n\n> Along the way, it also runs functions called lifecycle hooks, giving users the opportunity to add their own code at specific stages.\n\n略微查了下资料,个人理解hook就是一个你可以插入自己代码的地方。\n\n\n\n上面的流程图中,红色框的地方就是hook,是Vue提供的一个接口。同时这些hook也是一个Vue对象的property,属性。\n\n如果你想在红色框所在的阶段做点什么事情,就可以用这些hook,进入这些hook(入口),则需要通过给相应的hook(property)赋值一个函数,也就是property是key,函数是value\n\n以文档中的demo为例,从流程图中可以看到created是一个hook,对应Vue的created属性,它的value是一个自己写的函数,这样,到了created阶段,这个函数就会被调用和执行。然后继续created之后的阶段\n\n```javascript\nvar vm = new Vue({\n data: {\n a: 1\n },\n created: function () {\n // `this` points to the vm instance\n console.log('a is: ' + this.a)\n }\n})\n// => \"a is: 1\"\n```\n\n<br><br><br>\n\n* [vue 中钩子 是怎样的一个概念?](https://www.zhihu.com/question/50880439)\n* [什么叫“钩子”?](https://segmentfault.com/q/1010000004335505)\n"
},
{
"alpha_fraction": 0.6493808031082153,
"alphanum_fraction": 0.6524767875671387,
"avg_line_length": 20.180328369140625,
"blob_id": "d302a2bd4abe7dc868dace3a9fef0ea159e5741c",
"content_id": "c60f14a9d50a226e6db65d0129f3a1993418a7b9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1292,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 61,
"path": "/udacity/cs253/ProblemSet03/test_db.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import sqlite3, logging, datetime\ndb_name='post.db'\n\ndef do_sql(sql, args=''):\n\tres = None\n\ttry:\n\t\tconn = sqlite3.connect(db_name)\n\t\tcur = conn.cursor()\n\t\tcur.execute(sql, args)\n\t\tres = cur.fetchall()\n\t\tconn.commit()\n\texcept Exception as e:\n\t\tlogging.exception(e)\n\tfinally:\n\t\tcur.close()\n\t\tconn.close()\n\treturn res\n\ndef insert():\n\tsql = 'insert into entries (update_date, post_subject, post_content) values (?, ?, ?)'\n\tupdate_date = datetime.date.today()\n\tpost_subject = 'hello'\n\tpost_content = 'world'\n\targs = (update_date, post_subject, post_content)\n\tdo_sql(sql, args)\n\ndef get_record_list():\n\tsql = 'select * from entries'\n\trecord_list = do_sql(sql)\n\treturn record_list\n\ndef update():\n\ttoday = post_date = datetime.date.today()\n\tprint(type(today))\n\tquery = 'update entries set post_date = ? where post_id = ?)'\n\targs = (today, '1')\n\t# do_sql(query, args)\n\ndef init():\n\tsql = 'drop table if exists entries'\n\tdo_sql(sql)\n\tsql = 'create table entries (\\\n\t\t\t\tpost_id integer primary key autoincrement,\\\n\t\t\t\tupdate_date date not null,\\\n\t\t\t\tpost_subject text not null,\\\n\t\t\t\tpost_content text not null)'\n\tdo_sql(sql)\n\ndef test():\n\tinit()\n\tinsert()\n\t# post_id = '1'\n\t\n\t# post_subject = \n\t# art = '^_^'\n\t# update()\n\trecord_list = get_record_list()\n\tfor item in record_list:\n\t\tprint(*item)\n\ntest()\n"
},
{
"alpha_fraction": 0.6679198145866394,
"alphanum_fraction": 0.6729323267936707,
"avg_line_length": 26.70833396911621,
"blob_id": "20690b0badd792ea4e2c322ec478960380354156",
"content_id": "66ffd08fbc959638d80d5e38a3c9abbd3aba7f79",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3990,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 144,
"path": "/udacity/cs253/Integration/_01-04_/user.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import re, random, hashlib\nfrom string import ascii_letters as letters\nfrom page import Page\nfrom database import Database\n\nUNUSED = 0\n\nre_username = re.compile(r'^[a-zA-Z0-9_-]{3,20}$')\ndef valid_username(username):\n\treturn re_username.match(username)\n\nre_password = re.compile(r'^.{3,20}$')\ndef valid_password(password):\n\treturn re_password.match(password)\n\nre_email = re.compile(r'^[\\S]+@[\\S]+.[\\S]+$')\ndef valid_verify(pd1, pd2):\n\treturn pd1 == pd2\n\ndef valid_email(email):\n\tif not email:\n\t\treturn True\n\telse:\n\t\treturn re_email.match(email)\n\ndef make_salt(length = 5):\n\t# salt = []\n\t# for i in range(length):\n\t# \tsalt.append(random.choice(letters))\n\t# return ''.join(salt)\n\treturn ''.join(random.choice(letters) for x in range(length))\n\ndef make_pw_hash(username, password, salt=None):\n\tif not salt:\n\t\tsalt = make_salt()\n\tenc = (username + password + salt).encode('utf-8')\n\tpw_hash = hashlib.sha256(enc).hexdigest()\n\treturn '%s,%s' % (salt, pw_hash)\n\ndef check_pw_hash(username, password, pw_hash):\n\tsalt = pw_hash.split(',')[0]\n\treturn pw_hash == make_pw_hash(username, password, salt)\n\n\nclass User(object):\n\tdef __init__(self, form):\n\t\tself.user_id = str(form.get('user_id'))\n\t\tself.username = form.get('username')\n\t\tself.password = form.get('password')\n\t\tself.email = form.get('email')\n\nclass Record(User):\n\tdef insert(self):\n\t\tquery = 'insert into users (username, pw_hash, email) values (?, ?, ?)'\n\t\tself.pw_hash = make_pw_hash(self.username, self.password)\n\t\targs = (self.username, self.pw_hash, self.email)\n\t\treturn Database().query_db(query, args)\n\n\tdef retrieve(self):\n\t\tquery = 'select * from users where username = ?'\n\t\targs = (self.username, )\n\t\trecord_list = Database().query_db(query, args)\n\t\treturn record_list\n\n\tdef update(self):\n\t\tquery = 'update users set pw_hash = ?, email = ? 
where username = ?'\n\t\tself.pw_hash = make_pw_hash(self.password)\n\t\targs = (self.pw_hash, self.email, self.username)\n\t\treturn Database().query_db(query, args)\n\ndef check_valid(form):\n\tvalid = True\n\tif not valid_username( form['username'] ):\n\t\tform['username_error'] = 'Invalid username'\n\t\tvalid = False\n\tif not valid_email( form['email'] ):\n\t\tform['email_error'] = 'Invalid email'\n\t\tvalid = False\n\tif not valid_verify( form['password'], form['verify'] ):\n\t\tform['verify_error'] = 'Password not matched'\n\t\tvalid = False\n\telif not valid_password( form['password'] ):\n\t\tform['password_error'] = 'Invalid password'\n\t\tvalid = False\n\t\n\tif valid:\n\t\tform['valid'] = valid\n\telse:\n\t\tform['password'] = form['verify'] = ''\n\treturn form\n\ndef check_usable(form):\n\tif not Record(form).retrieve():\n\t\tform['usable'] = True\n\telse:\n\t\tform['username_error'] = 'Username already taken, please choose another one.'\n\treturn form\n\nclass SignupHandler(Page):\n\tfilename = 'signup.html'\n\tdef get(self):\n\t\treturn self.render(self.filename)\n\n\tdef post(self):\n\t\tform = check_valid(self.form() )\t# check if valid\n\t\tif form.get('valid') == True:\n\t\t\tform = check_usable(self.form())\t# check if exist\n\t\t\tif form.get('usable') == True:\n\t\t\t\treturn self.register(form)\n\t\treturn self.render(self.filename, **form)\n\n\tdef register(self, form):\n\t\tRecord(form).insert()\n\t\trecord_list = Record(form).retrieve()\n\t\tif record_list:\n\t\t\tuser = User(form)\n\t\t\treturn self.login(user)\t# login and set cookie\n\t\telse:\n\t\t\treturn 'Oops...something went wrong...'\n\t\t\nclass SigninHandler(Page):\n\tfilename = 'signin.html'\n\tdef get(self):\n\t\tif self.check_valid_cookie():\n\t\t\treturn self.redirect('/welcome')\n\t\telse:\n\t\t\treturn self.render(self.filename)\n\n\tdef post(self):\n\t\tus_form = self.form()\n\t\trecord_list = Record(us_form).retrieve()\n\t\tif record_list:\n\t\t\tus_username = 
us_form.get('username')\n\t\t\tus_pw = us_form.get('password')\n\t\t\trecord = dict(record_list[0])\n\t\t\tpw_hash = record.get('pw_hash')\n\t\t\tif check_pw_hash(us_username, us_pw, pw_hash):\n\t\t\t\tuser = User(record)\n\t\t\t\treturn self.login(user)# sign in and set cookie\n\t\treturn self.render(self.filename, error='invalid login')\n\t\t\nclass SignoutHandler(Page):\n\tdef get(self):\n\t\treturn self.logout()\n"
},
{
"alpha_fraction": 0.6781741380691528,
"alphanum_fraction": 0.6835442781448364,
"avg_line_length": 28.965517044067383,
"blob_id": "1bc4c8a3250d8eb6c1569a0fb38372222d22ebc7",
"content_id": "f2c7451de9ace68dbe264009ed1b437befb35a26",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2607,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 87,
"path": "/udacity/cs253/Integration/_01-04_/blog.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\nfrom database import Database\n\nclass Post(object):\n\tdef __init__(self, post_id='', update_date='', subject='', content=''):\n\t\tself.post_id = post_id\n\t\tself.update_date = update_date\n\t\tself.subject = subject\n\t\tself.content = content\n\nclass Record(Post):\n\tdef insert(self):\n\t\tquery = 'insert into posts (update_date, subject, content) values (?, ?, ?)' \n\t\targs = (self.update_date, self.subject, self.content)\t\n\t\treturn Database().query_db(query, args)\n\n\tdef update(self):\n\t\tquery = 'update posts set update_date = ?, subject = ?, content = ? where post_id = ?'\n\t\targs = (self.update_date, self.subject, self.content, self.post_id)\n\t\treturn Database().query_db(query, args)\n\n\tdef retrieve(self, post_id=0, limit=10):\n\t\tif post_id > 0:\n\t\t\tquery = 'select * from posts where post_id = %d' % post_id\t\n\t\telse:\n\t\t\tquery = 'select * from posts order by post_id desc limit %d' % limit\n\t\treturn Database().query_db(query)\n\n\tdef delete(self):\n\t\tquery = 'delete from posts where post_id = ?'\n\t\targs = (self.post_id, )\n\t\treturn Database().query_db(query, args)\n\ndef get_post_list(record_list=[]):\n\tpost_list = []\n\tfor record in record_list:\n\t\tpost = Post(*record)\n\t\tpost_list.append(post)\n\treturn post_list\n\ndef get_post(record_list=[]):\n\tif record_list:\n\t\trecord = record_list[0]\n\t\tpost = Post(*record)\n\t\treturn post\n\nclass BlogIndexHandler(Page):\n\tfilename = 'blog_index.html'\n\tdef get(self):\n\t\trecord_list = Record().retrieve(limit=10)\n\t\tpost_list = get_post_list(record_list)\n\t\treturn self.render(self.filename, post_list=post_list)\n\t\nclass GetPostHandler(Page):\n\tfilename = 'blog_post.html'\n\tdef get(self, post_id):\n\t\trecord_list = Record().retrieve(post_id=post_id)\n\t\tpost = get_post(record_list)\n\t\treturn self.render(self.filename, post=post)\n\nclass NewPostHandler(Page):\n\tfilename = 'blog_new_post.html'\n\tdef get(self):\n\t\treturn 
self.render(self.filename)\n\n\tdef post(self):\n\t\tupdate_date = self.get_date(2018, 1, 1)\n\t\tsubject = self.form().get('subject')\n\t\tcontent = self.form().get('content')\n\t\tif subject and content:\n\t\t\tRecord(0, update_date, subject, content).insert()\n\t\treturn self.redirect('/blog')\n\nclass EditPostHandler(Page):\n\tfilename = 'blog_edit_post.html'\n\tdef get(self, post_id):\n\t\trecord_list = Record().retrieve(post_id=post_id)\n\t\tpost = get_post(record_list)\n\t\treturn self.render(self.filename, post=post)\n\n\tdef post(self, post_id):\n\t\tupdate_date = self.get_date()\n\t\tsubject = self.form().get('subject')\n\t\tcontent = self.form().get('content')\n\t\tif subject and content:\n\t\t\tRecord(post_id, update_date, subject, content).update()\n\t\treturn self.redirect('/blog/%d' % post_id)\n"
},
{
"alpha_fraction": 0.586776852607727,
"alphanum_fraction": 0.647382915019989,
"avg_line_length": 21.75,
"blob_id": "aa9daf50f0ff79dd9e36b8cb4087ce7ce541fe74",
"content_id": "b4135a7eaf9215773605f338235ca3b9bc8a7f55",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 363,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 16,
"path": "/transport_layer/udp/udp_python/udp_client.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n# -*- coding: utf-8 -*-\nimport socket\ns = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\nserver_host = '192.168.1.104'\nport = 8000\n\nwhile True:\n\tdata = input('Input data here: ')\n\ts.sendto(data.encode('utf-8'), (server_host, port))\n\tif not data:\n\t\tprint('Quit!\\n')\n\t\tbreak\n\tprint(\"From server: %s\\n\" % s.recv(1024).decode('utf-8'))\n\ns.close()"
},
{
"alpha_fraction": 0.5791583061218262,
"alphanum_fraction": 0.5804943442344666,
"avg_line_length": 21.34328269958496,
"blob_id": "ffa2a302ab907c51b28315c8f2aa2a0d8e4749ab",
"content_id": "25bfa3f43bc9ecc098c2d60ebc905065244dae4c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1497,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 67,
"path": "/udacity/cs253/ProblemSet02/Quiz2_SignUp/Page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from html import escape\n\nhead = '''\n<head>\n\t<title>Sign Up</title>\n\t\t<style type=\"text/css\">\n\t\t\t.label {text-align: right}\n\t\t\t.error {color: red}\n\t</style>\n</head>\n'''\n\nbody = '''\n<body>\n\t<h2>Signup</h2>\n\t%(form)s\n</body>\n'''\n\nform = '''\\\n<form method=\"post\">\n\t<table>\n\t\t<tr>\n\t\t\t<td class=\"label\">Username</td>\n\t\t\t<td><input type=\"text\" name=\"username\" value=\"%(username)s\"></td>\n\t\t\t<td class=\"error\">%(username_error)s</td>\n\t\t</tr>\n\t\t<tr>\n\t\t\t<td class=\"label\">Password</td>\n\t\t\t<td><input type=\"password\" name=\"password\" value=\"%(password)s\"></td>\n\t\t\t<td class=\"error\">%(password_error)s</td>\n\t\t</tr>\n\t\t<tr>\n\t\t\t<td class=\"label\">Verify Password</td>\n\t\t\t<td><input type=\"password\" name=\"verify\" value=\"%(verify)s\"></td>\n\t\t\t<td class=\"error\">%(verify_error)s</td>\n\t\t</tr>\n\t\t<tr>\n\t\t\t<td class=\"label\">Email (Optional)</td>\n\t\t\t<td><input type=\"text\" name=\"email\" value=\"%(email)s\"></td>\n\t\t\t<td class=\"error\">%(email_error)s</td>\n\t\t</tr>\n\t</table>\n\t<input type=\"submit\">\n</form>\n'''\n\ndef get_default_form_args():\n\treturn {\n\t\t'username': '',\n\t\t'username_error': '',\n\t\t'password': '',\n\t\t'password_error': '',\n\t\t'verify': '',\n\t\t'verify_error': '',\n\t\t'email': '',\n\t\t'email_error': ''\n\t}\n\ndefault_form_args = get_default_form_args()\n\ndef make_page(form_args=default_form_args):\n\tform_args = dict( (key, escape(val)) for (key, val) in form_args.items() )\n\tm_form = form % form_args\n\tm_body = body % {'form': m_form}\n\tm_page = '<html>%(head)s%(body)s</html>' % {'head': head, 'body': m_body}\n\treturn m_page\n"
},
{
"alpha_fraction": 0.6183177828788757,
"alphanum_fraction": 0.6392523646354675,
"avg_line_length": 27.457447052001953,
"blob_id": "f0147b7dbabee527895af6bfe5ebeaea6e242ca1",
"content_id": "5f9573e38a9eac671b8a80f8883d6dd20b9f583e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2675,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 94,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/static/nba/assets/dl.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import requests, json, time\nfrom io import StringIO\nimport logging\n\nsess = requests.Session()\nheaders = {\n\t'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',\n\t'Accept-Encoding': 'gzip, deflate',\n\t'Accept-Language': 'zh-CN,zh;q=0.9',\n\t'Host': 'nba.stats.qq.com',\n\t'Cache-Control': 'no-cache',\n\t'Pragma': 'no-cache',\n\t'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36'\n}\n\ndef get_team_list_raw():\n\theaders['Host'] = 'matchweb.sports.qq.com'\n\turl = 'http://matchweb.sports.qq.com/rank/team'\n\tparams = {'competitionId': '100000', 'from': 'NBA_PC'}\n\ttry:\n\t\tresp = sess.get(url, headers=headers, params=params).text\n\t\tresp = resp.lstrip('[0,').rstrip(',\"\"]')\n\t\tdata = json.loads(resp)\n\t\tfor i in range(5):\n\t\t\tif '30.png' in data['eastsouth'][i]['badge']:\n\t\t\t\tdata['eastsouth'][i]['teamId'] = 30\n\t\twith open('team_list_raw.json', 'w') as f:\n\t\t\tf.write(json.dumps(data))\n\texcept Exception as e:\n\t\tlogging.exception(e)\n\n## param: teamQqId\n## return: {'rank': {statRank} }\ndef get_team_stat(teamQqId):\n\theaders['Host'] = 'ziliaoku.sports.qq.com'\n\turl = 'http://ziliaoku.sports.qq.com/cube/index'\n\tparams = { 'cubeId': '12',\n\t\t\t\t'dimId': '3,4,12,13',\n\t\t\t\t'from': 'sportsdatabase',\n\t\t\t\t'params': 't1:%d' % teamQqId}\n\t\n\tteam_stat = {}\n\ttry:\n\t\tresp = sess.get(url, headers=headers, params=params).text\n\t\tdata = json.loads(resp)['data']\n\t\tdata['regStatRank'].pop('teamId')\n\t\tteam_stat = {'rank': data['regStatRank']}\n\texcept Exception as e:\n\t\tlogging.exception(e)\n\t\n\ttime.sleep(2)\n\treturn team_stat\n\ndef make_team_list():\n\tleague = {\n\t\t'east': ['eastsouth', 'central', 'atlantic'],\n\t\t'west': ['pacific', 'westnorth', 'westsouth']\n\t}\n\twith open('team_list_raw.json') as f:\n\t\tdata = json.loads(f.read())\n\t\n\tteam_list = []\n\tfor conf, 
regionList in sorted(league.items()):\n\t\tprint(conf)\n\t\tfor region in regionList:\n\t\t\tqq_team_list = data[region]\n\t\t\tfor qq_team in qq_team_list:\n\t\t\t\tteamQqId = int(qq_team['teamId'])\n\t\t\t\tname = qq_team['name']\n\t\t\t\tteam = {}\n\t\t\t\tteam['teamQqId'] = teamQqId\n\t\t\t\tteam['teamListId'] = len(team_list)\n\t\t\t\tteam['stat'] = get_team_stat(teamQqId)\n\t\t\t\tteam['name'] = name\n\t\t\t\tteam['homepage'] = 'http://nba.stats.qq.com/team/?id=%d' % teamQqId\n\t\t\t\tteam['logo'] = '/static/nba/assets/logo/%d.png' % teamQqId\n\t\t\t\tteam['conference'] = conf\n\t\t\t\tteam['region'] = region\n\t\t\t\tteam_list.append(team)\n\t\t\t\tprint(teamQqId, name)\n\n\tdata = json.dumps(team_list, ensure_ascii=False)\n\twith open('team_list.json', 'w') as f:\n\t\tf.write(data)\n\ndef update():\n\tget_team_list_raw()\n\tmake_team_list()\n\nupdate()\n\n# TODO crontab\n# get_stats()\n# update_team_stats()\n"
},
{
"alpha_fraction": 0.707430362701416,
"alphanum_fraction": 0.727554202079773,
"avg_line_length": 31.299999237060547,
"blob_id": "073c47607d5225b2774fdf4b310bd3a158c97f20",
"content_id": "9e2e567c2060e9d091acaef82e308d1dff4a32d0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 740,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 20,
"path": "/vuejs/demo/smooth_scroll_to_top/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "1.\t动画,静态的帧,连起来,间隔非常短,就变成了动画animation\n\n2.\t注意,不能用\n\n\t```javascript\n\tvar pos = document.documentElement.scrollTop;\n\t// pos -= 50;\n\t```\n\t\n\tpos只是赋值,不是引用。必须操作原对象\n\t\n\t```javascript\n\tdocument.documentElement.scrollTop -= 40;\n\t```\n\t\n*\t[How TO - Scroll Back To Top Button](https://www.w3schools.com/howto/howto_js_scroll_to_top.asp)\n*\t[DOM Animation](https://www.w3schools.com/js/js_htmldom_animate.asp)\n*\t[DOM Animation tryjs_dom_animate_3](https://www.w3schools.com/js/tryit.asp?filename=tryjs_dom_animate_3)\n*\t[setInterval](https://www.w3schools.com/jsref/met_win_setinterval.asp)\n*\t[setInterval](http://www.w3school.com.cn/jsref/met_win_setinterval.asp)\n"
},
{
"alpha_fraction": 0.7739937901496887,
"alphanum_fraction": 0.7739937901496887,
"avg_line_length": 23.846153259277344,
"blob_id": "d8ca2783dba5ecd7220a875273747c9ae83891ac",
"content_id": "d682bd498036fc65279af4c09b49e86bbb9503a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 323,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 13,
"path": "/udacity/cs253/Integration/_01-03_/schema.sql",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "drop table if exists posts;\ndrop table if exists artworks;\n\ncreate table posts (\n\tpost_id integer primary key autoincrement,\n\tupdate_date date not null,\n\tsubject text not null,\n\tcontent text not null);\n\ncreate table artworks (\n\tartwork_id integer primary key autoincrement,\n\tsubject text not null,\n\tcontent text not null);\n"
},
{
"alpha_fraction": 0.6649983525276184,
"alphanum_fraction": 0.6696696877479553,
"avg_line_length": 25.513275146484375,
"blob_id": "30e0236c3823357afc90311dd3661b44d4862594",
"content_id": "1a3a39ab07b4c687acffcea2fbe3a312a6e2b038",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3009,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 113,
"path": "/website/page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import hmac\nfrom flask import render_template, request, redirect, make_response\nfrom flask.views import MethodView\nimport json\nimport os.path\nfrom time import time, gmtime, strftime\nfrom datetime import datetime, date\n\nhkey = 'DK'.encode()\n\nclass Page(MethodView):\n\tdef hsts(self, resp):\n\t\tresp = make_response(resp)\n\t\tresp.headers['Strict-Transport-Security'] = 'max-age=15768000'\n\t\treturn resp\n\t\n\tdef render(self, filename='', **kw):\n\t\treturn self.hsts(render_template(filename, **kw))\n\t\t\n\tdef render_raw(self, filename):\n\t\tfilename = './static/%s' % filename\n\t\twith open(filename, 'r') as f:\n\t\t\treturn self.hsts(f.read())\n\n\tdef redirect(self, target):\n\t\treturn self.hsts(redirect(target))\n\n\tdef get_referer(self):\n\t\treturn request.headers.get('referer')\n\n\tdef get_date(self, year=0, month=0, day=0):\n\t\tif year and month and day:\n\t\t\treturn date(year, month, day)\n\t\telse:\n\t\t\treturn date.today()\n\n\tdef get_args(self, key):\n\t\treturn request.args.get(key)\n\n\tdef form(self):\n\t\tform = {}\n\t\tif request.is_json:\n\t\t\tform = request.get_json()\t# type-dict\n\t\telse:\n\t\t\tform = request.form.to_dict()\n\t\treturn form\n\n\tdef cookies(self):\n\t\treturn request.cookies\n\n\tdef logout(self):\n\t\tresp = make_response(self.redirect('/'))\n\t\tresp.set_cookie(key='uid', value='', max_age=5)\n\t\tresp.set_cookie(key='username', value='', max_age=5)\n\t\treturn resp\n\n\t# cookie 用uid + secret(fixed) 进行hmac\n\t# pw_hash 用name + pw + salt(random) 进行hash\n\tdef login(self, user):\n\t\tif request.is_json:\n\t\t\tresp = make_response('welcome')\n\t\telse:\n\t\t\ttarget = '/welcome'\n\t\t\tif self.get_args('redirect'):\n\t\t\t\t# /welcome?redirect=/blog/new \n\t\t\t\ttarget += '?redirect=%s' % self.get_args('redirect')\n\t\t\tresp = make_response(self.redirect(target))\n\t\tresp.set_cookie(key='username', value=user.username)\n\t\t\n\t\tuid = user.uid\n\t\tusername = user.username\n\t\tkey = 
'uid'\n\t\tval = make_secure_cookie(user.uid, username)\n\t\tresp.set_cookie(key=key, value=val)\n\t\treturn resp\n\n\t# TODO\n\tdef check_valid_cookie(self):\n\t\tval = self.cookies().get('uid')\n\t\tusername = self.cookies().get('username')\n\t\treturn check_secure_cookie(val, username)\n\n\tdef json_response(self, record_list=[], data=''):\n\t\tif record_list:\n\t\t\titem_list = []\n\t\t\tfor record in record_list:\n\t\t\t\titem = {}\n\t\t\t\tfor key in record.keys():\n\t\t\t\t\titem[key] = record[key]\n\t\t\t\titem_list.append(item)\n\t\t\tdata = json.dumps(item_list, ensure_ascii=False)\n\t\tresp = make_response(data)\n\t\tresp.headers['content-type'] = 'application/json'\n\t\t# resp.headers['last-modified'] = strftime(\"%a, %d %b %Y %H:%M:%S GMT\", gmtime())\n\t\treturn resp\n\n\tdef png_response(self, filename):\n\t\twith open('./assets/%s' % filename) as f:\n\t\t\tresp = make_response(f.read())\n\t\tresp.headers['content-type'] = 'image/png'\n\t\treturn resp\n\ndef make_secure_cookie(uid, username):\n\tmsg = uid + username\n\tdigest = hmac.new(hkey, msg.encode()).hexdigest()\n\tval = '%s|%s' % (uid, digest)\n\treturn val\n\ndef check_secure_cookie(val, username):\n\tif val:\n\t\tuid = val.split('|')[0]\n\t\tus_val = make_secure_cookie(uid, username)\n\t\treturn val == us_val\n\t"
},
{
"alpha_fraction": 0.5835616588592529,
"alphanum_fraction": 0.6054794788360596,
"avg_line_length": 17.299999237060547,
"blob_id": "0e74c02c0c3fe736e1831c954c986be9f598414d",
"content_id": "7aa834e073cdb0986f366d334aa71f7fa0e46187",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 365,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 20,
"path": "/application_layer/http/simple_client/simple_server.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "def main():\n\timport socket\n\ts = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\ts.connect(('github.com', 80))\n\ts.send(b'GET / HTTP/1.1\\r\\nHost: github.com\\r\\nConnection: close\\r\\n\\r\\n')\n\tbuf = []\n\n\twhile True:\n\t\td = s.recv(1024)\n\t\tif d:\n\t\t\tbuf.append(d)\n\t\telse:\n\t\t\tbreak\n\n\tdata = b''.join(buf)\n\tprint(data.decode())\n\ts.close()\n\nif __name__ == '__main__':\n\tmain()"
},
{
"alpha_fraction": 0.5667580962181091,
"alphanum_fraction": 0.5704160928726196,
"avg_line_length": 20.135265350341797,
"blob_id": "825cdfccdb770c2ab2da0681c6f9c4bbf5495c3b",
"content_id": "8178087cfc7bc37e34a6b579f94a06f676c9eed1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 4386,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 207,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/static/artwork/artwork.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "var comp = Vue.component('modal', {\n\ttemplate: '#modal-template',\n\tdata: function(){\n\t\treturn {\n\t\t\tuser: {\n\t\t\t\tusername: '',\n\t\t\t\tpassword: ''\n\t\t\t},\n\t\t\terror: {\n\t\t\t\tusername: '',\n\t\t\t\tpassword: ''\n\t\t\t}\n\t\t}\n\t},\n\tmethods: {\n\t\treset: function(){\n\t\t\tthis.user = {\n\t\t\t\tusername: '',\n\t\t\t\tpassword: ''\n\t\t\t};\n\t\t\tthis.error = {\n\t\t\t\tusername: '',\n\t\t\t\tpassword: ''\n\t\t\t}\n\t\t},\n\t\tcheckValidSignin: function(){\n\t\t\tvar valid = true;\n\t\t\tif(this.user.username){\n\t\t\t\tthis.error.username = '';\n\t\t\t}else{\n\t\t\t\tthis.error.username = 'Invalid username';\n\t\t\t\tvalid = false;\n\t\t\t}\n\n\t\t\tif(this.user.password){\n\t\t\t\tthis.error.password = '';\n\t\t\t}else{\n\t\t\t\tthis.error.password = 'Invalid password';\n\t\t\t\tvalid = false;\n\t\t\t}\n\t\t\treturn valid;\n\t\t},\n\t\tsignin: function(){\n\t\t\tif( this.checkValidSignin() ){\n\t\t\t\tvar self = this;\n\t\t\t\t// console.log(self.user);\n\t\t\t\taxios.post('/signin', self.user)\n\t\t\t\t\t.then( function(resp){\n\t\t\t\t\t\t// console.log(resp);\n\t\t\t\t\t\tif(resp.data === 'welcome'){\n\t\t\t\t\t\t\tself.reset();\n\t\t\t\t\t\t\tself.$emit('succ');\n\t\t\t\t\t\t}else{\n\t\t\t\t\t\t\tself.error.username = 'Invalid login';\n\t\t\t\t\t\t\tself.error.password = 'Invalid login';\n\t\t\t\t\t\t\t// console.log('Invalid login');\n\t\t\t\t\t\t}\n\t\t\t\t\t} );\n\t\t\t}\n\t\t},\n\t\tcancel: function(){\n\t\t\tthis.reset();\n\t\t\tthis.$emit('cancel');\n\t\t}\n\t}\n})\n\nvar app = new Vue({\n\tel: '#artwork',\n\tdata: {\n\t\tpathname: '',\n\t\tresp: '',\n\t\tshowSigninModal: false,\n\t\terror:{\n\t\t\tid: '',\n\t\t\tsubject: '',\n\t\t\tcontent: ''\n\t\t},\n\t\tartwork: {\n\t\t\tid: '',\n\t\t\tsubject: '',\n\t\t\tcontent: '',\n\t\t\ttype: ''\n\t\t},\n\t\tartworkList: []\n\t},\n\tcreated: function(){\n\t\tthis.pathname = document.location.pathname;\n\t\tthis.getArtworkList();\n\t},\n\n\tmethods:{\n\t\tsetAddArtwork: 
function(){\n\t\t\tthis.reset();\n\t\t\tthis.artwork.type = 'add';\n\t\t},\n\t\tsetUpdateArtwork: function(){\n\t\t\tthis.reset();\n\t\t\tthis.artwork.type = 'update';\n\t\t},\n\t\treset: function(){\n\t\t\tthis.artwork = {\n\t\t\t\tid: '',\n\t\t\t\tsubject: '',\n\t\t\t\tcontent: '',\n\t\t\t\ttype: ''\n\t\t\t};\n\t\t\tthis.error = {\n\t\t\t\tid: '',\n\t\t\t\tsubject: '',\n\t\t\t\tcontent: ''\n\t\t\t};\n\t\t\tthis.showSigninModal = false;\n\t\t},\n\t\tcheckValidArtwork: function(){\n\t\t\tvar valid = true;\n\t\t\tif( this.artwork.type === 'update' ){\n\t\t\t\tvar reDigit = /^\\d+$/;\n\t\t\t\tif( reDigit.test(this.artwork.id) && this.artwork.id>0 ){\n\t\t\t\t\tthis.error.id = '';\n\t\t\t\t}else{\n\t\t\t\t\tthis.error.id = 'Invalid ID';\n\t\t\t\t\tvalid = false;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif( this.artwork.subject ){\n\t\t\t\tthis.error.subject = '';\n\t\t\t}else{\n\t\t\t\tthis.error.subject = 'Invalid Subject';\n\t\t\t\tvalid = false;\n\t\t\t}\n\n\t\t\tif( this.artwork.content ){\n\t\t\t\tthis.error.content = '';\n\t\t\t}else{\n\t\t\t\tthis.error.content = 'Invalid Content';\n\t\t\t\tvalid = false;\n\t\t\t}\n\t\t\treturn valid;\n\t\t},\n\t\tsubmit: function(){\n\t\t\tif( this.checkValidArtwork() === true ){\n\t\t\t\tvar APIpost = `${this.pathname}`;\n\t\t\t\tvar self = this;\n\t\t\t\taxios.post(APIpost, self.artwork)\n\t\t\t\t\t.then( function (resp){\n\t\t\t\t\t\tself.resp = resp;\n\t\t\t\t\t\tif(resp.data === 'signin'){\n\t\t\t\t\t\t\t// fill in the modal signin form\n\t\t\t\t\t\t\tself.showSigninModal = true;\n\t\t\t\t\t\t}else if(resp.data === 'inserted'){\n\t\t\t\t\t\t\t// todo 增加过渡效果\n\t\t\t\t\t\t\tself.artworkList.unshift(self.artwork);\n\t\t\t\t\t\t\tself.reset();\n\t\t\t\t\t\t}else if(resp.data === 'updated'){\n\t\t\t\t\t\t\tvar index = self.artworkList.length - self.artwork.id;\n\t\t\t\t\t\t\tself.artworkList[index] = self.artwork;\n\t\t\t\t\t\t\tself.reset()\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t}\n\t\t},\n\n\t\tgetArtworkList: function(){\n\t\t\tvar 
APIjson = `${this.pathname}?q=json`;\n\t\t\tvar self = this;\n\t\t\taxios.get(APIjson)\n\t\t\t\t.then(function(resp){\n\t\t\t\t\tself.artworkList = resp.data;\n\t\t\t\t})\n\t\t}\n\t}\n})\n\n\nfunction toggle_bg(e){\n\tif (document.getElementById(\"btn\").value==\"Dark on Light\"){\n\t\tdocument.getElementById(\"btn\").value=\"Light on Dark\";\n\t\tdocument.getElementById(\"artwork_list\").style.background = \"#FFFFFF\";\n\t\tdocument.getElementById(\"artwork_list\").style.color = \"#000000\";\n\t}\n\n\t//Checking if select field is disabled\n\telse {\n\t\t//Change the select field state to enabled and changing the value of button to disable\n\t\tdocument.getElementById(\"btn\").value=\"Dark on Light\";\n\t\tdocument.getElementById(\"artwork_list\").style.background = \"#2A4767\";\n\t\tdocument.getElementById(\"artwork_list\").style.color = \"#FFFFFF\";\n\t}\n}\n\n\n\n\n// getArtworkList: function(){\n// \tvar self = this;\n// \tvar xhr = new XMLHttpRequest()\n// \txhr.onreadystatechange = function(){\n// \t\tif( xhr.readyState === 4 && xhr.status === 200 ){\n// \t\t\tvar resp = JSON.parse(xhr.responseText)\n// \t\t\tself.artworkList = resp\n// \t\t}\n// \t}\n// \txhr.open('GET', './assets/artwork.json')\n// \txhr.send()\n// }"
},
{
"alpha_fraction": 0.5645582675933838,
"alphanum_fraction": 0.5836382508277893,
"avg_line_length": 20.37430191040039,
"blob_id": "7b2c546cd4f61f8196149a24b9715f2b551c7f18",
"content_id": "31a6bb533728dd46f71c40e10fae7988c1501de3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 3878,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 179,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/static/nba/nba.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "var teamList = [];\nconst total = 5;\nconst maxVal = 87;\nconst maxY = 90;\nconst center = {\n\tx: 150,\n\ty: 150\n}\n\nvar labelList = [\n\t{\tx: 137, y:70, name:'得分'\t},\n\t{\tx: 221, y:128, name:'助攻'\t},\n\t{\tx: 192, y:228, name:'篮板'\t},\n\t{\tx: 88, y:230, name:'抢断'\t},\n\t{\tx: 45, y:130, name:'盖帽'\t}\n]\n\nVue.component('polygraph', {\n\ttemplate: '#polygraph-template',\n\tprops: ['id'],\n\tcomputed: {\n\t\trankPoints: function(){\n\t\t\treturn getRankPoints(this.id);\n\t\t}\n\t}\n})\n\nVue.component('polygraph-base', {\n\ttemplate: '#polygraph-base-template',\n\tdata: function(){\n\t\treturn {\n\t\t\tbase1: getBase(1),\n\t\t\tbase2: getBase(2),\n\t\t\tbase3: getBase(3),\n\t\t\tbase4: getBase(4),\n\t\t\taxis: getAxis(4),\n\t\t}\n\t}\n})\n\nVue.component('axis-label', {\n\ttemplate: '#axis-label-template',\n\tprops: ['label']\n})\n\n\nvar app = new Vue({\n\tel: '#nba',\n\tdata: {\n\t\tteamList: false,\n\t\tpathname: '',\n\t\tlabelList: labelList,\n\t\tcurId: '',\n\t\ttooltipText: ''\n\t},\n\tcreated: function(){\n\t\tthis.pathname = document.location.pathname;\n\t\tthis.getTeamList();\n\n\t},\n\tmethods: {\n\t\tsetCurId: function(conf, col, row){\n\t\t\tthis.curId = getTeamListId(conf, col, row);\n\t\t\tthis.updateTooltipText();\n\t\t},\n\t\tgetTeamHomepage: function(conf, col, row){\n\t\t\tvar team = getTeam(conf, col, row);\n\t\t\treturn team.homepage;\n\t\t},\n\t\tgetTeamLogo: function(conf, col, row){\n\t\t\tvar team = getTeam(conf, col, row);\n\t\t\treturn team.logo;\n\t\t},\n\t\tgetTeamName: function(conf, col, row){\n\t\t\tvar team = getTeam(conf, col, row);\n\t\t\treturn team.name;\n\t\t},\n\t\tupdateTooltipText: function(){\n\t\t\tvar team = teamList[this.curId];\n\t\t\tvar name = team['name'];\n\t\t\tvar rank = team['stat']['rank'];\n\t\t\tthis.tooltipText = `${name}\\n`\n\t\t\t\t\t\t\t+ `场均数据排名\\n`\n\t\t\t\t\t\t\t+ `[得分]: No.${rank['pointsRank']}\\n`\n\t\t\t\t\t\t\t+ `[助攻]: No.${rank['assistsRank']}\\n`\n\t\t\t\t\t\t\t+ `[篮板]: 
No.${rank['reboundsRank']}\\n`\n\t\t\t\t\t\t\t+ `[盖帽]: No.${rank['stealsRank']}\\n`\n\t\t\t\t\t\t\t+ `[抢断]: No.${rank['blocksRank']}\\n`\n\t\t},\n\t\tgetTeamList: function(){\n\t\t\tvar APIjson = `${this.pathname}?q=json`;\n\t\t\tvar self = this;\n\t\t\taxios.get(APIjson)\n\t\t\t\t.then(function(resp){\n\t\t\t\t\tteamList = resp.data;\n\t\t\t\t\tself.teamList = true;\n\t\t\t\t})\n\t\t}\n\t}\n})\n\nfunction getTeamListId(conf, col, row){\n\tvar id = 0;\n\tif(conf === 'east'){\n\t\tid = (col - 1) * 5 + (row - 1);\n\t}else if(conf === 'west'){\n\t\tid = (col - 1) * 5 + (row - 1) + 15;\n\t}\n\treturn id;\n}\n\nfunction getTeam(conf, col, row){\n\tvar teamListId = getTeamListId(conf, col, row);\n\tvar team = teamList[teamListId];\n\treturn team;\n}\n\n\n// math helper...\nfunction valueToPoint (value, index) {\n\tvar x = 0;\n\tvar y = -value * 0.8;\n\tvar angle = Math.PI * 2 / total * index;\n\tvar cos = Math.cos(angle);\n\tvar sin = Math.sin(angle);\n\tvar tx = x * cos - y * sin + center.x;\n\tvar ty = x * sin + y * cos + center.y;\n\treturn {\n\t\tx: tx,\n\t\ty: ty\n\t}\n}\n\nfunction getRankPoints(teamListId){\n\tvar points = [], valY, point;\n\tvar rank = teamList[teamListId]['stat']['rank'];\n\n\tvalY = maxY - 3*rank['pointsRank'];\n\tpoint = valueToPoint(valY, 0);\n\tpoints.push(`${point.x},${point.y}`);\n\n\tvalY = maxY - 3*rank['assistsRank'];\n\tpoint = valueToPoint(valY, 1);\n\tpoints.push(`${point.x},${point.y}`);\n\n\tvalY = maxY - 3*rank['reboundsRank'];\n\tpoint = valueToPoint(valY, 2);\n\tpoints.push(`${point.x},${point.y}`);\n\n\tvalY = maxY - 3*rank['stealsRank'];\n\tpoint = valueToPoint(valY, 3);\n\tpoints.push(`${point.x},${point.y}`);\n\n\tvalY = maxY - 3*rank['blocksRank'];\n\tpoint = valueToPoint(valY, 4);\n\tpoints.push(`${point.x},${point.y}`);\n\n\treturn points.join(' ');\n}\n\n\nfunction getBase (level){\n\tvar base = [];\n\tfor(let i=0; i < total; i++){\n\t\tvar point = valueToPoint(maxVal*level/4, 
i);\n\t\tbase.push(`${point.x},${point.y}`);\n\t}\n\treturn base.join(' ');\n}\n\nfunction getAxis(level){\n\tvar axis = [];\n\tfor (let i = 0; i < total; i++ ){\n\t\tvar point = valueToPoint(maxVal, i);\n\t\taxis.push(`${point.x},${point.y}`);\n\t\taxis.push(`${center.x},${center.y}`);\n\t}\n\treturn axis.join(' ');\n}\n"
},
{
"alpha_fraction": 0.6601044535636902,
"alphanum_fraction": 0.6653274297714233,
"avg_line_length": 27.284090042114258,
"blob_id": "b6d263048c1c5f6ea63be980ae9ad272af7e8de2",
"content_id": "23d39cee170adf7675de8af1b811e3e1978835a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2489,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 88,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/blog.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\nfrom database import Database\n\nclass Post(object):\n\tdef __init__(self, id='', update_date='', subject='', content=''):\n\t\tself.id = id\n\t\tself.update_date = update_date\n\t\tself.subject = subject\n\t\tself.content = content\n\nclass Record(Post):\n\tdef insert(self):\n\t\tquery = 'insert into posts (update_date, subject, content) values (?, ?, ?)' \n\t\targs = (self.update_date, self.subject, self.content)\t\n\t\treturn Database().query_db(query, args)\n\n\tdef update(self):\n\t\tquery = 'update posts set update_date = ?, subject = ?, content = ? where id = ?'\n\t\targs = (self.update_date, self.subject, self.content, self.id)\n\t\treturn Database().query_db(query, args)\n\n\tdef retrieve(self, id=0, limit=10):\n\t\tif id > 0:\n\t\t\tquery = 'select * from posts where id = %d' % id\t\n\t\telse:\n\t\t\tquery = 'select * from posts order by id desc limit %d' % limit\n\t\treturn Database().query_db(query)\n\n\tdef delete(self):\n\t\tquery = 'delete from posts where id = ?'\n\t\targs = (self.id, )\n\t\treturn Database().query_db(query, args)\n\nclass BlogIndexHandler(Page):\n\tfilename = 'blog/index/index.html'\n\t\n\tdef get(self):\n\t\tif self.get_args('q') == 'json':\n\t\t\trecord_list = Record().retrieve(limit=10)\n\t\t\treturn self.json_response(record_list)\n\t\telse:\n\t\t\treturn self.render_raw(self.filename)\n\nclass BlogPostHandler(Page):\n\tfilename = 'blog/post/post.html'\n\t\n\tdef get(self, id):\n\t\tif self.get_args('q') == 'json':\n\t\t\trecord_list = Record().retrieve(id=id)\n\t\t\treturn self.json_response(record_list)\n\t\telse:\n\t\t\treturn self.render_raw(self.filename)\n\t\n\t# update blogpost\n\tdef post(self, id):\n\t\tif self.check_valid_cookie():\n\t\t\tprint('logged')\n\t\t\tform = self.form()\n\t\t\tsubject = form.get('subject')\n\t\t\tcontent = form.get('content')\n\t\t\tif subject and content:\n\t\t\t\tupdate_date = self.get_date()\n\t\t\t\tRecord(id, update_date, subject, 
content).update()\n\t\t\t\treturn 'updated'\n\t\t\telse:\n\t\t\t\treturn 'invalid form'\n\t\telse:\n\t\t\treturn 'signin'\n\nclass NewBlogpostHandler(Page):\n\tfilename = 'blog/new/new.html'\n\tdef get(self):\n\t\tif self.check_valid_cookie():\n\t\t\treturn self.render_raw(self.filename)\n\t\telse:\n\t\t\treturn self.redirect('/signin?redirect=/blog/new')\t\n\n\tdef post(self):\n\t\tif self.check_valid_cookie():\n\t\t\tupdate_date = self.get_date(2018, 1, 1)\n\t\t\tsubject = self.form().get('subject')\n\t\t\tcontent = self.form().get('content')\n\t\t\tif subject and content:\n\t\t\t\tRecord(0, update_date, subject, content).insert()\n\t\t\treturn self.redirect('/blog')\n\t\telse:\n\t\t\t# do not add param ?redirect=/blog/new\n\t\t\treturn self.redirect('/signin')\n"
},
{
"alpha_fraction": 0.6658729910850525,
"alphanum_fraction": 0.6809523701667786,
"avg_line_length": 24.219999313354492,
"blob_id": "2fc24e7a67cc5eb80b384a0626b9b2655afdb7f1",
"content_id": "e4c6c6a55c8485a84a4b536b78d75032db80cb22",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1260,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 50,
"path": "/udacity/cs253/Lesson02a_Templates/Check.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import re\nimport Page\n\nre_username = re.compile(r'^[a-zA-Z0-9_-]{3,20}$')\ndef valid_username(username):\n\treturn re_username.match(username)\n\nre_password = re.compile(r'^.{3,20}$')\ndef valid_password(password):\n\treturn re_password.match(password)\n\nre_email = re.compile(r'^[\\S]+@[\\S]+.[\\S]+$')\ndef valid_verify(pd1, pd2):\n\treturn pd1 == pd2\n\ndef valid_email(email):\n\tif not email:\n\t\treturn True\n\telse:\n\t\treturn re_email.match(email)\n\ndef check_signup_form(form):\n\tus_username = form['username']\n\tus_pd1 = form['password']\n\tus_pd2 = form['verify']\n\tus_email = form['email']\n\tprint('%s\\t%s\\t%s\\t%s\\n' % (us_username, us_pd1, us_pd2, us_email) )\n\n\tchecked_form = Page.get_default_signup_args()\n\tchecked_form['username'] = us_username\n\tchecked_form['email'] = us_email\n\tvalid = True\n\n\tif not valid_username(us_username):\n\t\tchecked_form['username_error'] = 'Invalid username'\n\t\tvalid = False\n\tif not valid_email(us_email):\n\t\tchecked_form['email_error'] = 'Invalid email'\n\t\tvalid = False\n\n\tif not valid_verify(us_pd1, us_pd2):\n\t\tchecked_form['verify_error'] = 'Password not matched'\n\t\tvalid = False\n\telif not valid_password(us_pd1):\n\t\tchecked_form['password_error'] = 'Invalid password'\n\t\tvalid = False\n\n\tif valid:\n\t\tchecked_form['valid'] = valid\n\treturn checked_form"
},
{
"alpha_fraction": 0.6837183833122253,
"alphanum_fraction": 0.6875687837600708,
"avg_line_length": 29.25,
"blob_id": "b9663631803a6453fd184fcaf21582b69da5922d",
"content_id": "cd0dbd8fdd86a89918014d1f324527cb28e674dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1818,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 60,
"path": "/udacity/cs253/Integration/_01-04_/artwork.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\nfrom database import Database\n\nclass Artwork(object):\n\tdef __init__(self, artwork_id='', subject='', content=''):\n\t\tself.artwork_id = artwork_id\n\t\tself.subject = subject\n\t\tself.content = content\n\nclass Record(Artwork):\n\tdef insert(self):\n\t\tquery = 'insert into artworks (subject, content) values (?, ?)'\n\t\targs = (self.subject, self.content)\n\t\treturn Database().query_db(query, args)\n\n\tdef update(self):\n\t\tquery = 'update artworks set subject = ?, content = ? where artwork_id = ?'\n\t\targs = (self.subject, self.content, self.artwork_id)\n\t\treturn Database().query_db(query, args)\n\t\t\n\tdef retrieve(self, artwork_id=0, limit=10):\n\t\tif artwork_id > 0:\n\t\t\tquery = 'select * from artworks where artwork_id = %d' % artwork_id\n\t\telse:\n\t\t\tquery = 'select * from artworks order by artwork_id desc limit %d' % limit\n\t\treturn Database().query_db(query)\n\t\n\tdef delete(self):\n\t\tquery = 'delete from artworks where artwork_id = ?'\n\t\targs = (artwork_id, )\n\t\tDatabase().query_db(query, args)\n\ndef get_artwork_list(record_list=[]):\n\tartwork_list=[]\n\tfor record in record_list:\n\t\tartwork = Artwork(*record)\n\t\tartwork_list.append(artwork)\n\treturn artwork_list\n\nclass AsciiArtHandler(Page):\n\tfilename = 'ascii_art.html'\n\n\tdef get(self):\n\t\trecord_list = Record().retrieve(limit=10)\n\t\tartwork_list = get_artwork_list(record_list)\n\t\treturn self.render(self.filename, artwork_list=artwork_list)\n\n\tdef post(self):\n\t\tsubject = self.form().get('subject')\n\t\tcontent = self.form().get('content')\n\t\tif subject and content:\n\t\t\tartwork_id = self.form().get('artwork_id')\n\t\t\tif artwork_id:\n\t\t\t\tRecord(artwork_id, subject, content).update()\n\t\t\telse:\n\t\t\t\tRecord(0, subject, content).insert()\n\t\t\treturn self.redirect('/ascii_art')\n\t\telse:\n\t\t\terror = 'subject or content is empty'\n\t\t\treturn self.render(self.filename, error=error)\n\n\t\n"
},
{
"alpha_fraction": 0.6170212626457214,
"alphanum_fraction": 0.6232790946960449,
"avg_line_length": 17.367816925048828,
"blob_id": "21760aaee79364315007ae5594cc2970c70baec0",
"content_id": "fca911c68900ddfca6a98590981d8908834f0e50",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1598,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 87,
"path": "/udacity/cs253/Lesson02_[01-26]_FormsAndInput.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n# use Flask instead of webapp2\nfrom flask import Flask\nfrom flask import Response\n# from flask import Request\nfrom flask import request\napp = Flask(__name__)\n\ncheckbox = '''\n<form method=\"post\" action=\"/checkbox\">\n\t<input type=\"checkbox\" name=\"q\">\n\t<input type=\"submit\">\n</form>\n'''\n\nradiobox = '''\n<form>\n\t<label>\n\t\tA\n\t\t<input type=\"radio\" name=\"r\" value=\"a\">\n\t</label>\n\t<br>\n\t<label>\n\t\tB\n\t\t<input type=\"radio\" name=\"r\" value=\"b\">\n\t</label>\n\t<br>\n\t<label>\n\t\tC\n\t\t<input type=\"radio\" name=\"r\" value=\"c\">\n\t</label>\n\t<br>\n\t<input type=\"submit\">\n</form>\n'''\n\ndropdown = '''\n<form>\n\t<select name=\"q\">\n\t\t<option value=\"1\">a</option>\n\t\t<option value=\"2\">b</option>\n\t\t<option value=\"3\">c</option>\n\t</select>\n\t<br><br>\n\t<input type=\"submit\">\n</form>\n'''\n\nform = '''\\\n<form method=\"post\" action=\"/form\">\n\t<br>\n\t<label>\n\t\tusername\n\t\t<input name=\"username\">\n\t</label>\n\t<br><br>\n\t<label>\n\t\tpassword\n\t\t<input name=\"password\" type=\"password\">\n\t</label>\n\t<br><br>\n\t<input type=\"submit\">\n</form>\n'''\n\[email protected]('/', methods=['GET'])\ndef HomePage():\n\treturn dropdown\n\[email protected]('/checkbox', methods=['POST'])\ndef tCheckBox():\n\tprint(request.get_data())\n\tprint(request.form['q'])\n\treturn request.get_data()\n\[email protected]('/form', methods=['GET', 'POST'])\ndef tForm():\n\tif request.method == 'GET':\n\t\tres = request.url + str(request.headers)\n\t\treturn Response(response=res, content_type='text/plain')\n\telse:\n\t\tprint(request.get_data())\n\t\treturn Response(response=request.get_data(), content_type='text/plain')\n\nif __name__ == '__main__':\n\tapp.run(port=8000, debug=True)\n"
},
{
"alpha_fraction": 0.6834917068481445,
"alphanum_fraction": 0.6870546340942383,
"avg_line_length": 24.89230728149414,
"blob_id": "fb6e4c0444721b797dd9c893503eeb43c2b92bc7",
"content_id": "9244498d8128d320933a7c818d35cdfe91b45c3f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1696,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 65,
"path": "/udacity/cs253/Integration/_01-04_/page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import datetime, hmac\nfrom flask import render_template, request, redirect, make_response\nfrom flask.views import MethodView\n\nhkey = 'DK'.encode()\n\nclass Page(MethodView):\n\tdef render(self, filename='', **kw):\n\t\treturn render_template(filename, **kw)\n\n\tdef get_args(self, key):\n\t\treturn request.args.get(key)\n\n\tdef redirect(self, url):\n\t\treturn redirect(url)\n\n\tdef get_date(self, year=0, month=0, day=0):\n\t\tif year and month and day:\n\t\t\treturn datetime.date(year, month, day)\n\t\telse:\n\t\t\treturn datetime.date.today()\n\n\tdef form(self):\n\t\treturn request.form.to_dict()\n\n\tdef cookies(self):\n\t\treturn request.cookies\n\n\tdef logout(self):\n\t\tresp = make_response(self.redirect('/'))\n\t\tresp.set_cookie(key='user_id', value='', max_age=5)\n\t\tresp.set_cookie(key='username', value='', max_age=5)\n\t\treturn resp\n\n\t# cookie 用user_id + secret(fixed) 进行hmac\n\t# pw_hash 用name + pw + salt(random) 进行hash\n\tdef login(self, user):\n\t\tresp = make_response(self.redirect('/welcome'))\n\t\tresp.set_cookie(key='username', value=user.username)\n\t\t\n\t\tuser_id = user.user_id\n\t\tusername = user.username\n\t\tkey = 'user_id'\n\t\tval = make_secure_cookie(user.user_id, username)\n\t\tresp.set_cookie(key=key, value=val)\n\t\treturn resp\n\n\t# TODO\n\tdef check_valid_cookie(self):\n\t\tval = self.cookies().get('user_id')\n\t\tusername = self.cookies().get('username')\n\t\treturn check_secure_cookie(val, username)\n\ndef make_secure_cookie(user_id, username):\n\tmsg = user_id + username\n\tdigest = hmac.new(hkey, msg.encode()).hexdigest()\n\tval = '%s|%s' % (user_id, digest)\n\treturn val\n\n# TODO\ndef check_secure_cookie(val, username):\n\tif val:\n\t\tuser_id = val.split('|')[0]\n\t\tus_val = make_secure_cookie(user_id, username)\n\t\treturn val == us_val\n\t"
},
{
"alpha_fraction": 0.5583652853965759,
"alphanum_fraction": 0.5785440802574158,
"avg_line_length": 21.37714195251465,
"blob_id": "c528b24b7082db469bbe16ff2949313fef5102aa",
"content_id": "3dca8359918000994b1627f7408ee50eedebe9e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 3967,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 175,
"path": "/vuejs/demo/nba_teams/app.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "var url_teams = './assets/teams.json'\nvar nbateams = null\nvar total = 5\nvar maxVal = 87\nvar maxY = 90\nvar center = {\n\tx: 150,\n\ty: 150\n}\n\nvar stats = [\n\t{ label: '得分', value: maxVal },\n\t{ label: '助攻', value: maxVal },\n\t{ label: '篮板', value: maxVal },\n\t{ label: '抢断', value: maxVal },\n\t{ label: '盖帽', value: maxVal }\n]\n\nvar axisPoints = [\n\t{x: 137, y:70},\n\t{x: 221, y:128},\n\t{x: 192, y:228},\n\t{x: 88, y:230},\n\t{x: 45, y:130},\n]\n\nVue.component('polygraph', {\n\tprops: ['stats'],\n\ttemplate: '#polygraph-template',\n\tcomputed: {\n\t\tpoints: function(){\n\t\t\tvar points = this.stats.map(function(stat, i){\n\t\t\t\tvar point = valueToPoint(stat.value, i)\n\t\t\t\treturn `${point.x}, ${point.y}`\n\t\t\t}).join(' ')\n\t\t\treturn points\n\t\t},\n\t\tbase1: function() {return getBase(1)},\n\t\tbase2: function() {return getBase(2)},\n\t\tbase3: function() {return getBase(3)},\n\t\tbase4: function() {return getBase(4)},\n\t\taxis: function() {return getAxis(4)}\n\t},\n\tcomponents: {\n\t\t// a sub component for the labels\n\t\t'axis-label': {\n\t\t\tprops: {\n\t\t\t\tstat: Object,\n\t\t\t\tindex: Number,\n\t\t\t},\n\t\t\ttemplate: '#axis-label-template',\n\t\t\tcomputed: {\n\t\t\t\tpoint: function () {\n\t\t\t\t\tvar point = axisPoints[this.index]\n\t\t\t\t\treturn point\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n})\n\nvar app = new Vue({\n\tel: '#app',\n\tdata: {\n\t\tconference: ['east', 'west'],\n\t\teastRegions: ['eastsouth', 'central', 'atlantic'],\n\t\twestRegions: ['pacific', 'westnorth', 'westsouth'],\n\t\tcurrentRegion: '',\n\t\trange: [0, 1, 2, 3, 4],\n\t\tcurrentTeam: null,\n\t\tteams_east: null,\n\t\tteams_west: null,\n\t\tmessage: null,\n\t\tstats: null,\n\t\trankInfo: null\n\t},\n\n\tcreated: function () { this.fetchData() },\n\n\t// text formatting\n\tfilters: {\n\t\tgetImage: function(teamId){\n\t\t\treturn `./assets/${teamId}.png`\n\t\t},\n\t\tgetTeamPage: function(teamId){\n\t\t\treturn 
`http://nba.stats.qq.com/team/?id=${teamId}`\n\t\t}\n\t},\n\n\tmethods: {\n\t\tfetchData: function () {\n\t\t\tvar xhr = new XMLHttpRequest()\n\t\t\tvar self = this\n\t\t\txhr.onreadystatechange = function(){\n\t\t\t\tif(xhr.readyState === 4 && xhr.status === 200){\n\t\t\t\t\tnbateams = JSON.parse(xhr.responseText)\n\t\t\t\t\tself.teams_east = nbateams['east']\n\t\t\t\t\tself.teams_west = nbateams['west']\n\t\t\t\t}\n\t\t\t}\n\t\t\txhr.open('GET', url_teams);\n\t\t\txhr.send()\n\t\t},\n\t\tgetTeams: function(){\n\t\t\tthis.teams = nbateams\n\t\t},\n\t\tgetRegion: function(){\n\t\t\tthis.teams = nbateams\n\t\t},\n\t\tadd: function(){\n\t\t\tif(this.stats[2].value < 80){\n\t\t\t\tthis.stats[2].value += 1\n\t\t\t}\n\t\t},\n\t\tgetChart: function(conf, region, ix){\n\t\t\tif(conf && region){\n\t\t\t\tvar team = nbateams[conf][region][ix-1]\n\t\t\t\tthis.currentTeam = team\n\n\t\t\t\tthis.updateStats()\n\t\t\t}\n\t\t},\n\t\tupdateStats: function(){\n\t\t\tvar rank = this.currentTeam['statRank']\n\t\t\tstats[0].value = maxY - 3*rank['pointsRank']\n\t\t\tstats[1].value = maxY - 3*rank['assistsRank']\n\t\t\tstats[2].value = maxY - 3*rank['reboundsRank']\n\t\t\tstats[3].value = maxY - 3*rank['stealsRank']\n\t\t\tstats[4].value = maxY - 3*rank['blocksRank']\n\t\t\tconsole.log(stats)\n\t\t\tthis.stats = stats\n\t\t\tthis.rankInfo = `${this.currentTeam['name']}\\n`\n\t\t\t\t\t\t\t+ `场均数据排名\\n`\n\t\t\t\t\t\t\t+ `[得分]: No.${rank['pointsRank']}\\n`\n\t\t\t\t\t\t\t+ `[助攻]: No.${rank['assistsRank']}\\n`\n\t\t\t\t\t\t\t+ `[篮板]: No.${rank['reboundsRank']}\\n`\n\t\t\t\t\t\t\t+ `[盖帽]: No.${rank['stealsRank']}\\n`\n\t\t\t\t\t\t\t+ `[抢断]: No.${rank['blocksRank']}\\n`\n\t\t}\n\t}\n})\n\n// math helper...\nfunction valueToPoint (value, index) {\n\tvar x = 0;\n\tvar y = -value * 0.8;\n\tvar angle = Math.PI * 2 / total * index;\n\tvar cos = Math.cos(angle);\n\tvar sin = Math.sin(angle);\n\tvar tx = x * cos - y * sin + center.x;\n\tvar ty = x * sin + y * cos + center.y;\n\treturn {\n\t\tx: 
tx,\n\t\ty: ty\n\t}\n}\n\nfunction getBase (level) {\n\tvar base = []\n\tfor(let i=0; i < total; i++){\n\t\tvar point = valueToPoint(maxVal*level/4, i);\n\t\tbase.push(`${point.x},${point.y}`)\n\t}\n\treturn base.join(' ')\n}\n\nfunction getAxis(level){\n\tvar axis = []\n\tfor (let i = 0; i < total; i++ ){\n\t\tvar point = valueToPoint(maxVal, i);\n\t\taxis.push(`${point.x},${point.y}`)\n\t\taxis.push(`${center.x},${center.y}`)\n\t}\n\treturn axis.join(' ')\n}"
},
{
"alpha_fraction": 0.6571428775787354,
"alphanum_fraction": 0.6647619009017944,
"avg_line_length": 22.33333396911621,
"blob_id": "59139ee9c6c59c9e6407b92820cac368678556b0",
"content_id": "4ce93600c8582ba5e8b020de17473b34e04ae73b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1050,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 45,
"path": "/udacity/cs253/Lesson02a_Templates/Page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from html import escape\nfrom flask import render_template # flask will help us auto escape html\nimport os\nimport ROT13, FizzBuzz\n\ndef fill_template(name='index', **kw):\n\tfilename = '%s.html' % name\n\treturn render_template(filename, **kw)\n\ndef get_default_signup_args():\n\treturn {'username': '',\n\t\t\t'username_error': '',\n\t\t\t'password_error': '',\n\t\t\t'verify_error': '',\n\t\t\t'email': '',\n\t\t\t'email_error': ''}\n\ndef render_fizzbuzz(n):\n\tfizzbuzz = FizzBuzz.get(n)\n\tpage = fill_template('fizzbuzz', FizzBuzz=fizzbuzz)\n\treturn page\n\ndef render_index():\n\tpage = fill_template('index')\n\treturn page\n\ndef render_rot13(text=''):\n\ttext = ROT13.encode(text)\n\targs = {'text': text}\n\treturn fill_template('rot13', **args)\n\ndef render_signup(form={}):\n\tif form:\n\t\targs = form\n\telse:\n\t\targs = get_default_signup_args()\n\tprint(args)\n\treturn fill_template('signup', **args)\n\ndef render_welcome(username=''):\n\tif username:\n\t\targs = {'username': username, 'a': 'a'}\n\t\treturn fill_template('welcome', **args)\n\telse:\n\t\treturn 'Invalid username<br><br><a href=\"/\">Back</a>'\n"
},
{
"alpha_fraction": 0.7655172348022461,
"alphanum_fraction": 0.7655172348022461,
"avg_line_length": 23.16666603088379,
"blob_id": "ae9c3c7cf903391315da86156732a1abcdc0df62",
"content_id": "73c1b2c5fdcc1b18bd1e3221558cf63aee874246",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 145,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 6,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/fizzbuzz.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\n\nclass FizzBuzzHandler(Page):\n\tfilename = 'fizzbuzz/fizzbuzz.html'\n\tdef get(self):\n\t\treturn self.render_raw(self.filename)\n"
},
{
"alpha_fraction": 0.6215583086013794,
"alphanum_fraction": 0.629173994064331,
"avg_line_length": 15.257143020629883,
"blob_id": "5fcd00bddbe26ad5915f45bae41bcd6df03a66eb",
"content_id": "0860ae03fc51af02f28c7b8fa48f4969ee85b347",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2258,
"license_type": "no_license",
"max_line_length": 126,
"num_lines": 105,
"path": "/vuejs/demo/nba_teams/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "腾讯NBA的[这个页面](http://nba.stats.qq.com/team/list.htm)展示了东西部联盟的各个球队,而通过开发者工具可以发现更多json格式的详细信息。\n\n本demo旨在获取与展示这些信息。\n\n* TODO\n\n\t结合另一个demo: [nba_team_chart](https://github.com/jJayyyyyyy/network/tree/master/vuejs/demo/nba_team_chart),来展示各支球队的在5项数据统计上的排名。\n\n<br><br><br>\n\n## 笔记\n\n### 1.img\n\n* 固定路径(原始html)\n\n\tindex.html如下,其中,引号\"\"里面就是图片的路径地址\n\n\t```html\n\t<img src=\"./assets/1.png\">\n\t```\n\n* 单个可变路径\n\n\tindex.html如下\n\n\t```html\n\t<div id=\"app\">\n\t\t\t<img v-bind:src=\"imgSrc\">\n\t</div>\n\t```\n\n\t对应地,app里面要有src,\n\n\t```javascript\n\tvar app = new Vue({\n\t\t\tel: '#app',\n\t\t\tdata: {\n\t\t\t\t\timgSrc: './assets/2.png'\n\t\t\t}\n\t}\n\t```\n\n\t这样就可以通过改变`imgSrc`来改变某一个img标签指向的图片了\n\n* basePath + 参数\n\n\t比如有10张图片放在`./assets/`目录中,图片名`1.png`, `2.png` ...\n\n\tVue的文档里面有这么一句话\n\n\t> Vue.js allows you to define filters that can be used to apply common text formatting.\n\n\t因此需要借助filter。html如下,其中`img_id`是图片名中的数字,如1,2,3... 而`getImage`是filter中的一个key\n\n\t```html\n\t<div id=\"app\">\n\t\t\t<img v-bind:src=\"img_id | getImage\">\n\t</div>\n\t```\n\n\tVue的options要添加filters\n\n\t```javascript\n\tvar app = new Vue({\n\t\t\tel: '#app',\n\t\t\tdata: {\n\t\t\t\t\timgSrc: './assets/2.png'\n\t\t\t},\n\n\t\t\t// text formatting\n\t\t\tfilters: {\n\t\t\t\t\tgetImage: function(teamId){\n\t\t\t\t\t\t\treturn `./assets/${teamId}.png`\n\t\t\t\t\t}\n\t\t\t},\n\t}\n\t```\n\n### 2.component\n\n*\t异步加载的东西,暂时无法做到用v-if先判断,再更新到component\n\n\t举例:\n\n\t```html\n\t<tbody v-if=\"teams_east\">\n\t\t<tr v-for=\"ix in range\">\n\t\t\t<td-team v-for=\"region in eastRegions\"\n\t\t\t\t\tv-bind:team=\"teams_east[region][ix]\">\n\t\t\t</td-team>\n\t\t</tr>\n\t</tbody>\n\t```\n\n\t```javascript\n\tVue.component('td-team', {\n\t\tprops: ['team'],\n\t\ttemplate: '<td>{{ team['name'] }}</td>'\n\t})\n\t```\n\n\t说明:在xhr获得的json的teams之前,teams_east还是Undefined,就算用了if进行conditional render`<tbody v-if=\"teams_east\">`也绕不开\n\n\t因此,猜测component必须有对应实体\n"
},
{
"alpha_fraction": 0.5708812475204468,
"alphanum_fraction": 0.5708812475204468,
"avg_line_length": 17.407407760620117,
"blob_id": "324099a69a0212b3ca7880bc5b366b5520320f08",
"content_id": "1e7598de20cf5e62f18b6c66d187e7293146d9f1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 522,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 27,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/static/blog/index/index.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "var app = new Vue({\r\n\tel: '#index',\r\n\tdata: {\r\n\t\tpathname: '',\r\n\t\tpostList: []\r\n\t},\r\n\tcreated: function(){\r\n\t\tthis.getPostList();\r\n\t\tthis.pathname = document.location.pathname;\r\n\t},\r\n\tfilters:{\r\n\t\tgetPostUrl: function(id){\r\n\t\t\tvar pathname = document.location.pathname;\r\n\t\t\treturn `${pathname}/${id}`;\r\n\t\t}\r\n\t},\r\n\tmethods: {\r\n\t\tgetPostList: function(){\r\n\t\t\tvar APIjson = `${this.pathname}?q=json`;\r\n\t\t\tvar self = this;\r\n\t\t\taxios.get(APIjson)\r\n\t\t\t\t.then(function(resp){\r\n\t\t\t\t\tself.postList = resp.data;\r\n\t\t\t\t})\r\n\t\t}\r\n\t}\r\n})"
},
{
"alpha_fraction": 0.5718181729316711,
"alphanum_fraction": 0.5790908932685852,
"avg_line_length": 17.6610164642334,
"blob_id": "f9a7223b0052888f22c44e05d9799dc009c45f28",
"content_id": "137f80daca9058bbaf67dde4c448290753e01fba",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1100,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 59,
"path": "/vuejs/demo/signin/signin.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "var reUsername = /^[a-zA-Z0-9_-]{3,20}$/;\nvar rePassword = /^.{3,20}$/;\n\nvar app = new Vue({\n\tel: '#signin',\n\tdata: {\n\t\tusername: '',\n\t\tpassword: '',\n\t\tvalidUsername: false,\n\t\tvalidPassword: false,\n\t\tisValid: false\n\t},\n\n\tcomputed: {\n\t\tusernameError: function(){\n\t\t\tif( this.username ){\n\t\t\t\tif( reUsername.test(this.username) ){\n\t\t\t\t\tthis.validUsername = true;\n\t\t\t\t\treturn null;\n\t\t\t\t}else{\n\t\t\t\t\tthis.validUsername = false;\n\t\t\t\t\treturn 'Invalid Username';\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\tpasswordError: function(){\n\t\t\tif( this.password ){\n\t\t\t\tif( rePassword.test(this.password) ){\n\t\t\t\t\tthis.validPassword = true;\n\t\t\t\t\treturn null;\n\t\t\t\t}else{\n\t\t\t\t\tthis.validPassword = false;\n\t\t\t\t\treturn 'Invalid password';\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\tshowSubmit: function(){\n\t\t\tthis.isValid = (this.validUsername && this.validPassword);\n\t\t\treturn this.isValid;\n\t\t}\n\t},\n\t// methods\n\tmethods: {\n\t\tsucc: function(){\n\t\t\tif (this.isValid) {\n\t\t\t\tvar user = {\n\t\t\t\t\tusername: this.username,\n\t\t\t\t\tpassword: this.password\n\t\t\t\t};\n\t\t\t\tconsole.log(user);\n\t\t\t\t// var form = Object;\n\t\t\t\t// form.push(user);\n\t\t\t}\n\t\t},\n\t\tpostForm: function () {\n\t\t\t;\n\t\t}\n\t}\n})"
},
{
"alpha_fraction": 0.5958333611488342,
"alphanum_fraction": 0.643750011920929,
"avg_line_length": 23.049999237060547,
"blob_id": "ddb866f88653bf4b10d36462fb77c0cf77333116",
"content_id": "30ad0e21c4b508ce900f4dbe1a00aafd30ff9b5b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 480,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 20,
"path": "/transport_layer/udp/udp_python/udp_server.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n# -*- coding: utf-8 -*-\nimport socket\n\ns = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\nserver_host = '192.168.1.104'\nport = 8000\ns.bind((server_host, port))\nprint('\\nserver_host: %s\\tport: %d\\n' % (server_host, port))\n\nwhile True:\n\tdata, addr = s.recvfrom(1024)\n\tif not data:\n\t\tprint('Client offline!\\n')\n\t\tbreak\n\tprint('Received from %s:%s' % addr)\n\tprint('data: %s\\n' % data.decode('utf-8'))\n\ts.sendto(b'Hello '+ addr[0].encode('utf-8'), addr)\n\ns.close()"
},
{
"alpha_fraction": 0.5646687746047974,
"alphanum_fraction": 0.5646687746047974,
"avg_line_length": 17.676469802856445,
"blob_id": "6cd6e083dafb049e524b77285d9197e1e0810cba",
"content_id": "89378fddedb86c116ad07b21908a18d839e412ff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 634,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 34,
"path": "/website/static/blog/new/new.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "'use strict';\n\nvar app = new Vue({\n\tel: '#editor',\n\tdata: {\n\t\tpathname: '',\n\t\tsubject: '',\n\t\tcontent: '',\n\t\tvalid: false,\n\t},\n\tcreated: function(){\n\t\tthis.pathname = document.location.pathname;\n\t},\n\tmethods: {\n\t\tshowModal: function(){\n\t\t\tthis.valid = this.subject != '' && this.content != '';\n\t\t\treturn !this.valid;\n\t\t},\n\t\tsubmit: function(){\n\t\t\tif( this.valid ){\n\t\t\t\tvar self = this;\n\t\t\t\tvar APIpost = `${self.pathname}`;\n\t\t\t\tvar newPost = {\n\t\t\t\t\t'subject': self.subject,\n\t\t\t\t\t'content': self.content,\n\t\t\t\t};\n\t\t\t\taxios.post(APIpost, newPost)\n\t\t\t\t\t.then( function(resp){\n\t\t\t\t\t\twindow.location.replace(resp.data)\n\t\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n})"
},
{
"alpha_fraction": 0.8247422575950623,
"alphanum_fraction": 0.8247422575950623,
"avg_line_length": 26.714284896850586,
"blob_id": "a24163f2e4acf529bb288d1a3953be52ab9d0de2",
"content_id": "748d579ea7d9077ee53a647c27c78c672fdaab71",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 284,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 7,
"path": "/vuejs/demo/nba_team_chart/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "在腾讯NBA的球队页面,球队实力一栏用到了radar chart\n\n\n\ndemo旨在模仿实现类似的图表\n\n注意,企鹅用的是canvas,demo中目前使用的是svg\n"
},
{
"alpha_fraction": 0.6529636979103088,
"alphanum_fraction": 0.6644359230995178,
"avg_line_length": 22.75,
"blob_id": "803c58a6f0e94d21eebe175a44db53716d7cf5a4",
"content_id": "2f4dbcda103f10f55f1e4afc6cad777370d808d5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1046,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 44,
"path": "/udacity/cs253/Integration/_01-03_/check.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import re\nimport page\n\nre_username = re.compile(r'^[a-zA-Z0-9_-]{3,20}$')\ndef valid_username(username):\n\treturn re_username.match(username)\n\nre_password = re.compile(r'^.{3,20}$')\ndef valid_password(password):\n\treturn re_password.match(password)\n\nre_email = re.compile(r'^[\\S]+@[\\S]+.[\\S]+$')\ndef valid_verify(pd1, pd2):\n\treturn pd1 == pd2\n\ndef valid_email(email):\n\tif not email:\n\t\treturn True\n\telse:\n\t\treturn re_email.match(email)\n\ndef check_signup_form(form):\n\tfor item in form.items():\n\t\tprint(item)\n\tvalid = True\n\n\tif not valid_username( form['username'] ):\n\t\tform['username_error'] = 'Invalid username'\n\t\tvalid = False\n\tif not valid_email( form['email'] ):\n\t\tform['email_error'] = 'Invalid email'\n\t\tvalid = False\n\tif not valid_verify( form['password'], form['verify'] ):\n\t\tform['verify_error'] = 'Password not matched'\n\t\tvalid = False\n\telif not valid_password( form['password'] ):\n\t\tform['password_error'] = 'Invalid password'\n\t\tvalid = False\n\n\tif valid:\n\t\tform['valid'] = valid\n\telse:\n\t\tform['password'] = form['verify'] = ''\n\treturn form\n\n"
},
{
"alpha_fraction": 0.6685015559196472,
"alphanum_fraction": 0.6813455820083618,
"avg_line_length": 26.694915771484375,
"blob_id": "49eb27c584b931420bd3eeee833400512349f7a2",
"content_id": "cf6d9c41d137668bfd25d11fa1141e9dc212333c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1635,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 59,
"path": "/udacity/cs253/ProblemSet02/Quiz2_SignUp/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\nfrom flask import Flask, request, redirect, url_for\nimport Page, Check\n\napp = Flask(__name__)\n\[email protected]('/', methods=['GET'])\ndef GetRequestHandler():\n\treturn Page.make_page()\n\ndef check_login_form(form):\n\tus_username = form['username']\n\tus_pd1 = form['password']\n\tus_pd2 = form['verify']\n\tus_email = form['email']\n\tprint('%s\\t%s\\t%s\\t%s\\n' % (us_username, us_pd1, us_pd2, us_email) )\n\n\tchecked_form = Page.get_default_form_args()\n\tchecked_form['username'] = us_username\n\tchecked_form['email'] = us_email\n\tvalid = True\n\n\tif not Check.valid_username(us_username):\n\t\tchecked_form['username_error'] = 'Invalid username'\n\t\tvalid = False\n\tif not Check.valid_email(us_email):\n\t\tchecked_form['email_error'] = 'Invalid email'\n\t\tvalid = False\n\n\tif not Check.valid_verify(us_pd1, us_pd2):\n\t\tchecked_form['verify_password_error'] = 'Password not matched'\n\t\tvalid = False\n\telif not Check.valid_password(us_pd1):\n\t\tchecked_form['password_error'] = 'Invalid password'\n\t\tvalid = False\n\n\tchecked_form['valid'] = valid\t\n\treturn checked_form\n\[email protected]('/', methods=['POST'])\ndef PostRequestHandler():\n\tform = request.form\n\tchecked_form = check_login_form(form)\n\n\tif checked_form.get('valid') == True:\n\t\treturn redirect(url_for('SuccessLoginHandler', username=form['username']))\n\telse:\n\t\tchecked_form.pop('valid')\n\t\treturn Page.make_page(checked_form)\n\[email protected]('/welcome', methods=['GET'])\ndef SuccessLoginHandler():\n\tusername = request.args['username']\n\treturn 'Welcome, %s!' % username\n\nif __name__ == '__main__':\n\tapp.run(port=8000, debug=True)\n\t# app.run(port=8080, host='0.0.0.0', debug=False)\n\n"
},
{
"alpha_fraction": 0.4899328947067261,
"alphanum_fraction": 0.4966442883014679,
"avg_line_length": 26.18181800842285,
"blob_id": "89e34bef7f864aeb375389d176692b1bf759b12b",
"content_id": "114bffef9d3be925ef958fc61689f3c45625a52f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 298,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 11,
"path": "/udacity/cs253/Lesson02a_Templates/ROT13.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "def encode(text, offset=13):\n\ts = ''\n\tfor i in range(len(text)):\n\t\tif text[i].isalpha():\n\t\t\tif (text[i] >= 'a' and text[i] <= 'm') or (text[i] >= 'A' and text[i] <= 'M'):\n\t\t\t\ts = s + chr( ord(text[i]) + offset )\n\t\t\telse:\n\t\t\t\ts = s + chr( ord(text[i]) - offset )\n\t\telse:\n\t\t\ts = s + text[i]\n\treturn s"
},
{
"alpha_fraction": 0.8111454844474792,
"alphanum_fraction": 0.8452012538909912,
"avg_line_length": 28.454545974731445,
"blob_id": "ca440577f9ea7fefa6b5d40d1b8044dad3ccbe8d",
"content_id": "c6b8dbbf6db6ba3e2753dd179c1fc90b51255f8a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 593,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 11,
"path": "/vuejs/demo/fizzbuzz/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "## FizzBuzz\n\nUdacity cs253介绍的一个小程序,就是输入一个正整数n,输出一个从1到n的序列,如果index能被3整除,它的值就是Fizz,如果能被5整除就是Buzz,如果能同时被3和5整除,那就输出FizzBuzz\n\n之前在[这里](https://github.com/jJayyyyyyy/network/tree/master/udacity/cs253)的实现方式是,在client输入,提交给server,服务器算好一个list之后,render到一个html,再返回这个页面\n\n这个demo就是把这一来一回给省略了,直接在前端完成计算和render\n\n## TODO\n\n* transition过渡效果"
},
{
"alpha_fraction": 0.7014492750167847,
"alphanum_fraction": 0.7014492750167847,
"avg_line_length": 27.75,
"blob_id": "d9bc086c63b6c81cf2659b6b7e6454abc81cb300",
"content_id": "81e48c34e16cdc7bd967cd30a89c68386f6095f1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 345,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 12,
"path": "/udacity/cs253/Integration/_01-03_/DS.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "class Post(object):\n\tdef __init__(self, post_id, update_date, subject, content):\n\t\tself.post_id = post_id\n\t\tself.update_date = update_date\n\t\tself.subject = subject\n\t\tself.content = content\n\nclass Artwork(object):\n\tdef __init__(self, artwork_id, subject, content):\n\t\tself.artwork_id = artwork_id\n\t\tself.subject = subject\n\t\tself.content = content\n"
},
{
"alpha_fraction": 0.6490066051483154,
"alphanum_fraction": 0.6490066051483154,
"avg_line_length": 29.399999618530273,
"blob_id": "98c525e7a2536d738ff0b08b9fe884e4a1c79531",
"content_id": "b00f22ddf078f48ca2d106dab81810bcb2ce83ba",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 151,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 5,
"path": "/udacity/cs253/Lesson03_Databases/Art.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "class ArtWork(object):\n\tdef __init__(self, art_id='', subject='', content=''):\n\t\tself.art_id = art_id\n\t\tself.subject = subject\n\t\tself.content = content"
},
{
"alpha_fraction": 0.5232558250427246,
"alphanum_fraction": 0.5581395626068115,
"avg_line_length": 11.285714149475098,
"blob_id": "060cbdea83143cd9b18f45309c68898468a8f88d",
"content_id": "fb49df81bf4a4da48434b349a752259b43c54d87",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 86,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 7,
"path": "/transport_layer/udp/udp_python/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "*\tpython3.4-based\n\n*\tAF_INET ---> IPv4\n\n*\tSOCK_DGRAM ---> UDP\n\n*\tSOCK_STREAM ---> TCP\n"
},
{
"alpha_fraction": 0.6989489197731018,
"alphanum_fraction": 0.7079579830169678,
"avg_line_length": 25.117647171020508,
"blob_id": "6e8497c3251c2c70f5a2adf7835693c3b6bcc74a",
"content_id": "9f9dd145557755e3edba4d24c0d31119097a4f53",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1430,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 51,
"path": "/application_layer/http/simple_client/simple_server.c",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n#include <sys/types.h>\n#include <sys/socket.h>\n#include <netinet/in.h>\n#include <arpa/inet.h>\n#include <netdb.h>\n\nint main(int argc, char *argv[]){\n\tchar recvbuf[BUFSIZ];\n\tchar sendbuf[] = \"GET / HTTP/1.1\\r\\nHost: github.com\\r\\nConnection: close\\r\\n\\r\\n\";\n\tchar * hostname = \"github.com\";\n\tsize_t lenSockaddr;\n\tint sendlen = strlen(sendbuf), recvlen, i;\n\tint clientSockfd;\n\n\t// 服务器端网络地址结构体\n\tstruct sockaddr_in serverSockaddr_in;\n\tstruct sockaddr * serverSockaddr;\n\n\t// 数据初始化清零\n\tmemset( &serverSockaddr_in, 0, sizeof(serverSockaddr_in) );\n\n\t// IPv4\n\tserverSockaddr_in.sin_family = AF_INET;\n\n\t// https://www.gnu.org/software/libc/manual/html_node/Inet-Example.html\n\tstruct hostent * hostInfo = gethostbyname(hostname);\n\tserverSockaddr_in.sin_addr = * (struct in_addr *)hostInfo->h_addr;\n\n\t// 服务器端口号\n\tserverSockaddr_in.sin_port = htons(80);\n\n\t/*创建客户端套接字--IPv4协议,面向连接通信,TCP协议*/\n\tclientSockfd = socket(AF_INET, SOCK_STREAM, 0);\n\n\tserverSockaddr = (struct sockaddr *)(&serverSockaddr_in);\n\tlenSockaddr = sizeof(struct sockaddr);\n\tconnect( clientSockfd, serverSockaddr, lenSockaddr );\n\tsend(clientSockfd, sendbuf, sendlen, 0);\n\n\trecvlen = recv(clientSockfd, recvbuf, BUFSIZ, 0);\n\trecvbuf[recvlen] = 0;\n\tprintf(\"%s\\n\", recvbuf);\n\n\t/*关闭套接字*/\n\tclose(clientSockfd);\n \n\treturn 0;\n}\n"
},
{
"alpha_fraction": 0.7003890872001648,
"alphanum_fraction": 0.7003890872001648,
"avg_line_length": 23.5238094329834,
"blob_id": "fa8024e73a6803f19bb4286f7f1f37851f67055f",
"content_id": "9e3f0ba8c4f90a3c397c8d0ba1a2a3b0ce485cb5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 514,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 21,
"path": "/udacity/cs253/Lesson03_Databases/Page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from flask import render_template\nfrom Art import ArtWork\n\ndef fill_template(name='index', **kw):\n\tfilename = name + '.html'\n\treturn render_template(filename, **kw)\n\ndef render_index():\n\tpage = fill_template('index')\n\treturn page\n\ndef render_art(error='', record_list=[]):\n\tif error:\n\t\tpage = fill_template('art', error=error)\n\telse:\n\t\tartwork_list = []\n\t\tfor record in record_list:\n\t\t\tartwork = ArtWork(*record)\n\t\t\tartwork_list.append(artwork)\n\t\tpage = fill_template('art', artwork_list=artwork_list)\n\treturn page"
},
{
"alpha_fraction": 0.7965654134750366,
"alphanum_fraction": 0.803170382976532,
"avg_line_length": 53.07143020629883,
"blob_id": "59628c1a847d219ada2d3f62132728fd712bfe78",
"content_id": "630d128f62186aab7272c44b35ada2452609efd6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1027,
"license_type": "no_license",
"max_line_length": 224,
"num_lines": 14,
"path": "/udacity/cs253/Integration/_01-02_/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "此前使用的填充html的方法叫做**string substitution**, 但是在面对更大的html时,这种替换方法(可能)会变得比较麻烦。\n\n下一期将使用jinja2, 这个引擎可以帮助我们更好更方便地完成上述功能。\n\nTemplates are a way to organize your html in a way that's easier than string substitution. String substitution can get a little hairy once you get very large html files.\n\nJinja2 is one of the templating engines. And it is basically glorified string substitution except that it helps you handle separating those out into multiple files and folders so that you dont have to worry about it as much.\n\n*\t[Primer on Jinja Templating](https://realpython.com/blog/python/primer-on-jinja-templating/)\n*\t[Jinja2](http://jinja.pocoo.org/)\n\n*\tPS:\n\n\t初步研究了Jinja2之后,发现我们的替换思路很像,我的填充方法是Page.py里面的`fill_template()`,不过,很显然Jinja2是一个更好的引擎,而且有更多其他功能,所以我们不用自己造轮子啦~\n"
},
{
"alpha_fraction": 0.6689007878303528,
"alphanum_fraction": 0.7050938606262207,
"avg_line_length": 20.314285278320312,
"blob_id": "739ab014c601d3a1f441613c842fcedb83192144",
"content_id": "9ee4c3196f3ec89b834d643e0f28e96369cae206",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1072,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 35,
"path": "/udacity/cs253/Lesson03_Databases/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "##\tIntro\n\n1.\t1-16小节的测试代码放在`concept_01_16`文件夹内\n\n2.\t首先在命令行运行python3,然而导入并运行`main.py`中的`init_db()`,这样就建立了名为`ascii_art.db`的数据库文件,根据`schemal.sql`,里面有一个叫做`entries`的`table`\n\n\t```bash\n\t$ python3\n\t>>> from main import init\n\t>>>\tinit_db()\n\t```\n\n3.\t可以通过`test_db.py`对刚才建立的数据库进行测试,包括\n\n\t*\t添加新记录\n\t*\t查询已有记录\n\t*\t删除记录\n\t*\t重新初始化\n\n4.\t`main.py`通过Flask和sqlite3实现了Lesson3的剩余功能,即可以添加一条记录,内容包括了[id, title, art]\n\n5.\t不可能一次达到完美的,前端后端都是,总会经历多次重构。不要纠结,不要OCD\n\n##\tTODO\n\n*\tGAE版本\n*\t重构(optional)\n\n##\tReference\n\n*\t[udacity](https://classroom.udacity.com/courses/cs253)\n*\t[w3school SQL](https://www.w3schools.com/sql/)\n*\t[flask SQLite](http://flask.pocoo.org/docs/0.12/patterns/sqlite3/)\n*\t[ascii art](http://chris.com/ascii/index.php)\n*\t[w3school CSS](https://www.w3schools.com/css/)\n"
},
{
"alpha_fraction": 0.5656716227531433,
"alphanum_fraction": 0.5880597233772278,
"avg_line_length": 17.61111068725586,
"blob_id": "0edb0fe2ed8bba4c37d2e74ade8b0aae41ad7e74",
"content_id": "46b42f893876332e7cde33fbc000dd7f0d3755c3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 670,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 36,
"path": "/udacity/cs253/Lesson02_-27-50-_FormsAndInput/CheckDate.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\nmonths = [ 'January',\n\t\t\t'Feburary',\n\t\t\t'March',\n\t\t\t'April',\n\t\t\t'May',\n\t\t\t'June',\n\t\t\t'July',\n\t\t\t'August',\n\t\t\t'September',\n\t\t\t'October',\n\t\t\t'December']\n\nmonth_abbvs = dict( (m[:3].lower(), m) for m in months )\n\ndef valid_month(month):\n\tif month:\n\t\tvalid_m = month_abbvs.get( month[:3].lower() )\n\t\tif valid_m:\n\t\t\treturn valid_m\n\t# return \"Error Month\"\n\ndef valid_day(day):\n\tif day and day.isdigit():\n\t\tday = int(day)\n\t\tif day > 0 and day <= 31:\n\t\t\treturn day\n\t# return \"Error Day\"\n\ndef valid_year(year):\n\tif year and year.isdigit():\n\t\tyear = int(year)\n\t\tif year > 1900 and year < 2020:\n\t\t\treturn year\n\t# return \"Error Year\"\n"
},
{
"alpha_fraction": 0.7473683953285217,
"alphanum_fraction": 0.7473683953285217,
"avg_line_length": 16.363636016845703,
"blob_id": "b604ea9f5b6eff942c7cef54c72b360b5246926c",
"content_id": "4831dd540944478d8603c5bc69230c05bb31d086",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 190,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 11,
"path": "/udacity/cs253/Lesson02a_Templates/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "##\tTemplates\n\n*\tseparate different types of code\n*\tmake more readable code\n*\tmore secure website\n*\thtml that is easier to modify\n\n##\tTips\n\n*\tminimize code in template\n*\tminimize html in code"
},
{
"alpha_fraction": 0.725568950176239,
"alphanum_fraction": 0.7402945160865784,
"avg_line_length": 43,
"blob_id": "d8124001f671d04e22f5399ecd5f6d44e8fc98fd",
"content_id": "4cd70b38406a0045b537662e628af4bf5838cd9a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 747,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 17,
"path": "/udacity/cs253/Integration/_01-03_/static/artwork_toggle.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "function toggle_bg(e){\n\t//Checking if select field is enabled\n\tif (document.getElementById(\"btn\").value==\"Dark on Light\"){\n\t\t//Change the select field state to disabled and changing the value of button to enable\n\t\tdocument.getElementById(\"btn\").value=\"Light on Dark\";\n\t\tdocument.getElementById(\"artwork_list\").style.background = \"#FFFFFF\";\n\t\tdocument.getElementById(\"artwork_list\").style.color = \"#000000\";\n\t}\n\n\t//Checking if select field is disabled\n\telse {\n\t\t//Change the select field state to enabled and changing the value of button to disable\n\t\tdocument.getElementById(\"btn\").value=\"Dark on Light\";\n\t\tdocument.getElementById(\"artwork_list\").style.background = \"#2A4767\";\n\t\tdocument.getElementById(\"artwork_list\").style.color = \"#FFFFFF\";\n\t}\n}"
},
{
"alpha_fraction": 0.7193789482116699,
"alphanum_fraction": 0.7349051237106323,
"avg_line_length": 44.76315689086914,
"blob_id": "e7dcc9a17a5cf3014f7c35272aa1da45f5d7c1c6",
"content_id": "a5b8bb0f41d4da4b6a3ab5f5d9cdc0dfb2e4ab91",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1739,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 38,
"path": "/website/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from flask import Flask, request, redirect, url_for\nfrom rot13 import ROT13Handler\nfrom fizzbuzz import FizzBuzzHandler\nfrom index import IndexHandler\nfrom welcome import WelcomeHandler\nfrom artwork import AsciiArtHandler\nfrom database import init_db\nfrom blog import BlogIndexHandler, NewBlogpostHandler, BlogPostHandler\nfrom user import SignupHandler, SigninHandler, SignoutHandler\nfrom nba import NBAHandler\n\n# create app\napp = Flask(__name__)\n\n# $ export FLASK_APP=main\n# $ python3 -m flask init\[email protected]('ini')\ndef init():\n\tinit_db()\n# or add the next line before app.run\n# app.cli.command('ini')(database.init_db)\n\nif __name__ == '__main__':\n\tapp.add_url_rule('/', view_func=IndexHandler.as_view('index'))\n\tapp.add_url_rule('/rot13', view_func=ROT13Handler.as_view('rot13'))\n\tapp.add_url_rule('/fizzbuzz', view_func=FizzBuzzHandler.as_view('fizzbuzz'))\n\tapp.add_url_rule('/welcome', view_func=WelcomeHandler.as_view('welcome'))\n\tapp.add_url_rule('/ascii_art', view_func=AsciiArtHandler.as_view('ascii_art'))\n\tapp.add_url_rule('/blog', view_func=BlogIndexHandler.as_view('blog_index'))\n\tapp.add_url_rule('/blog/new', view_func=NewBlogpostHandler.as_view('new_blogpost'))\n\tapp.add_url_rule('/blog/<int:id>', view_func=BlogPostHandler.as_view('blogpost'))\n\t# app.add_url_rule('/blog/edit/<int:id>', view_func=EditPostHandler.as_view('edit_post'))\n\tapp.add_url_rule('/signup', view_func=SignupHandler.as_view('signup'))\n\tapp.add_url_rule('/signin', view_func=SigninHandler.as_view('signin'))\n\tapp.add_url_rule('/signout', view_func=SignoutHandler.as_view('signout'))\n\tapp.add_url_rule('/nba', view_func=NBAHandler.as_view('nba'))\n\tapp.run(port=8080, host='0.0.0.0', debug=True)\n\t# app.run(port=8080, host='0.0.0.0', debug=False)\n"
},
{
"alpha_fraction": 0.6642441749572754,
"alphanum_fraction": 0.6758720874786377,
"avg_line_length": 25.461538314819336,
"blob_id": "f0284259c2b8666d69df38b2e353a47ed404f310",
"content_id": "78fc23f63026f973a403f74e217418e5d5b3f15a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 688,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 26,
"path": "/udacity/cs253/Integration/_01-04_/fizzbuzz.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\n\ndef get_fizzbuzz_list(fizzbuzz_strn, limit=30):\n\tfizzbuzz_list = []\n\tif fizzbuzz_strn and fizzbuzz_strn.isdigit():\n\t\tfizzbuzz_n = int(fizzbuzz_strn)\n\t\tif fizzbuzz_n > limit:\n\t\t\tfizzbuzz_n = limit\n\t\tfor i in range(1, fizzbuzz_n + 1):\n\t\t\tif not i % 15:\n\t\t\t\titem = 'FizzBuzz'\n\t\t\telif not i % 3:\n\t\t\t\titem = 'Fizz'\n\t\t\telif not i % 5:\n\t\t\t\titem = 'Buzz'\n\t\t\telse:\n\t\t\t\titem = str(i)\n\t\t\tfizzbuzz_list.append(item)\n\treturn fizzbuzz_list\n\nclass FizzBuzzHandler(Page):\n\tfilename = 'fizzbuzz.html'\n\tdef get(self):\n\t\tfizzbuzz_strn = self.get_args('fizzbuzz_strn')\n\t\tfizzbuzz_list = get_fizzbuzz_list(fizzbuzz_strn)\n\t\treturn self.render(self.filename, fizzbuzz_list=fizzbuzz_list)\n"
},
{
"alpha_fraction": 0.6426116824150085,
"alphanum_fraction": 0.6975945234298706,
"avg_line_length": 28.100000381469727,
"blob_id": "725cf97f7562ba3471ac3fff53cf9cfd1fecedbe",
"content_id": "9975516b6f5cd21e7a9824edb21a2b63d81f4422",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 291,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 10,
"path": "/transport_layer/tcp/client.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "# https://docs.python.org/3/library/socket.html#example\nimport socket\n\nHOST = '127.0.0.1'\nPORT = 8888\nwith socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n\ts.connect((HOST, PORT))\n\ts.sendall('Hello, world'.encode('utf-8'))\n\tdata = s.recv(1024)\nprint('msg from server: ', repr(data))\n"
},
{
"alpha_fraction": 0.7074057459831238,
"alphanum_fraction": 0.7251249551773071,
"avg_line_length": 12.842767715454102,
"blob_id": "58dca38d7a3aab61b1d49f6f73232e4a3fc075bf",
"content_id": "8d32ec053f598a4e671a5e3c82a6e8a7e635ceef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3651,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 159,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/TODO.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "1.\t学习与练手\n\n\t用参数实现redirect\n\n\t流程:\n\n\t*\t未登录状态,GET请求/blog/new\n\t*\tserver的NewBlogpostHandler的get()方法,对请求进行redirect,并添加query参数,转向/signin?redirect=/blog/new\n\t*\t在SigninHandler进行透传,因为url会保留query参数\n\n\n\n\n\t发布new blogpost的时候,如果还未登录,就转到/signin,登录成功后,转到/blog/new\n\n\t也就是,要进入/blog/new,必须是登录状态,有合法cookie,否则会被redirect到/signin,举个例子,直接在url输入/blog/new,会被转回来。GET和POST都会被转回来\n\n\t填写subject和content,两者非空则提交json。否则提示Invalid,而且前端不提交。\n\n\t后端也要检查是否为空。如果合法,则插入数据库。然后转回/blog。如果不合法,就直接转回/blog\n\n\t因此,new要用form.submit=prevent,\n\n\t换用required,更简单,一步到位\n\n\n\n为什么vuejs中,local filters无法获得this?\n\n学习中遇到的问题,改造的场景如下:\n\n```html\n<div id=\"app\">\n\t<p>{{ 1 | f1 }}</p>\n\t<p>{{ 1 | f2 }}</p>\n\t<p>{{ a | f3 }}</p>\n</div>\n```\n\n```javascript\nvar app = new Vue({\n\tel: '#app',\n\tdata: {\n\t\ta: 2\n\t},\n\tfilters:{\n\t\tf1: function(id){\n\t\t\treturn this.a; // undefined\n\t\t},\n\t\tf2: function(id){\n\t\t\treturn id; \t// 1\n\t\t},\n\n\t\tf3: function(id){\n\t\t\treturn id; // 2\n\t\t}\n\t}\n})\n```\n\n每个p的值写在三个注释里了,只有第一个p是空的,而且一直是空的,说明并不会动态更新。如果在f1中用console.log(this),会得到window,但是在其他的options里面定义的函数却可以获得Vue的实例app的this呢?\n\nps:如果在f1中加上console.log(this.app),则会得到undefined,个人猜测此时可能还没有生成app,难道是local filters在Vue的lifecycle之前?\n\n如果不能获得this,那为什么还要把filters分为local和global两类?\n\nps2: 看了文档Filters — Vue.js ,似乎没有发现相关说明。\n\n\n1.\n\nindex + new 结合,类似artwork的add\n\nblogpost + edit 结合,类似artwork的update\n\n通过前端直接增减dom\n\n而不是分成多个页面\n\n\n2. index的每一个标题只是url,点击后,flask只返回post.html模版,如何给前端通知id?\n\n 因为只有一个通用模版,前端收到后,根据另外的参数——id,再去请求json数据,即每篇blogpost的实际subject, content,再填充到模版里面\n\n 对于'https://example.com/abc/1'\n\n 通过js,window.location.pathname,可以获得'/abc/1',在通过split得到id\n\n3. 点击button,进行post,如果cookie不合法,则激活modal进行登录,否则就完成了提交,然后更新dom\n\n\n4. 
index+new也用类似方法,完成前端的逻辑\n\n blog相当于artwork拆分成两个部分\n\n\n\n\n\n\n\n\n客户端的rss应用, 跨域,还是需要反向代理,要么是通过自己的server,要么通过如下方式\n\nhttp://blog.csdn.net/yw39019724/article/details/20624781\n\n使用google或者yahoo之类的网站提供的api\n\n\n慎用app.js\n\n\nSB天猫超市,必须设置允许第三方cookie,否则会被判断为网络环境异常,而且怎么输如验证码都没用\n\n\n\n\n\n\n1.\t后端发送raw\n\n\t保存db的同时,保存json\n\t\n\tvar artwork = {\n\t\tid: '',\n\t\tsubject: '',\n\t\tcontent: ''\n\t}\n\n\t添加url_rule,使得.json能够得到访问\n\n2.\t前端,基本页面不变\n\n\t加入vue,请求json\n\n\tbutton:new artwork\n\n\t---> 出现subject content\n\n\t\t\t都有内容后才显示提交\n\n\t\t\tform附加:类型:add\n\n\n\n\n\n\n\n\n\n\nbutton: update artwork\n\n---> 出现 update artwork id,subject, content\n\n\t\t都有内容后才显示提交\n\n\t\tform附加:类型:update\n"
},
{
"alpha_fraction": 0.7216138243675232,
"alphanum_fraction": 0.730259358882904,
"avg_line_length": 37.55555725097656,
"blob_id": "c7525b31f18ae8e540b17c35a581c0c52f43c43e",
"content_id": "b9c022c42b63145265502e756b3919b4012b41e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1735,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 45,
"path": "/udacity/cs253/Integration/_01-04_/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from flask import Flask, request, redirect, url_for\nfrom rot13 import ROT13Handler\nfrom fizzbuzz import FizzBuzzHandler\nfrom index import IndexHandler\nfrom welcome import WelcomeHandler\nfrom artwork import AsciiArtHandler\nfrom database import init_db\nfrom blog import BlogIndexHandler, NewPostHandler, GetPostHandler, EditPostHandler\nfrom user import SignupHandler, SigninHandler, SignoutHandler\n\n# create app\napp = Flask(__name__)\n\n\n# $ export FLASK_APP=main\n# $ python3 -m flask init\[email protected]('ini')\ndef init():\n\tinit_db()\n# or add the next line before app.run\n# app.cli.command('ini')(database.init_db)\n\n# class Signup(MethodView):\n# \tdef get(self):\n# \t\tsignup = page.Signup()\n# \t\treturn signup.render()\n\n# \tdef post(self):\n# \t\treturn 'pass'\n\n\nif __name__ == '__main__':\n\tapp.add_url_rule('/', view_func=IndexHandler.as_view('index'))\n\tapp.add_url_rule('/rot13', view_func=ROT13Handler.as_view('rot13'))\n\tapp.add_url_rule('/fizzbuzz', view_func=FizzBuzzHandler.as_view('fizzbuzz'))\n\tapp.add_url_rule('/welcome', view_func=WelcomeHandler.as_view('welcome'))\n\tapp.add_url_rule('/ascii_art', view_func=AsciiArtHandler.as_view('ascii_art'))\n\tapp.add_url_rule('/blog', view_func=BlogIndexHandler.as_view('blog_index'))\n\tapp.add_url_rule('/blog/new_post', view_func=NewPostHandler.as_view('new_post'))\n\tapp.add_url_rule('/blog/<int:post_id>', view_func=GetPostHandler.as_view('post'))\n\tapp.add_url_rule('/blog/edit/<int:post_id>', view_func=EditPostHandler.as_view('edit_post'))\n\tapp.add_url_rule('/signup', view_func=SignupHandler.as_view('signup'))\n\tapp.add_url_rule('/signin', view_func=SigninHandler.as_view('signin'))\n\tapp.add_url_rule('/signout', view_func=SignoutHandler.as_view('signout'))\n\tapp.run(port=8000, debug=True)\n"
},
{
"alpha_fraction": 0.5804794430732727,
"alphanum_fraction": 0.5804794430732727,
"avg_line_length": 19.13793182373047,
"blob_id": "13cd457a9c2a85d02d5d65ddf6d9799f4df74d01",
"content_id": "7db7e34eb6bb1fc22748fc95fd1a2725f28c6c0f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 584,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 29,
"path": "/udacity/cs253/Lesson02_-27-50-_FormsAndInput/Form.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from html import escape\n\nform = '''\\\n<form method=\"post\">\n\tWhat is your birthday?\n\t<br><br>\n\t<label>\n\t\tmonth\n\t\t<input type=\"text\" name=\"month\" value=\"%(month)s\">\n\t</label>\n\t<label>\n\t\tday\n\t\t<input type=\"text\" name=\"day\" value=\"%(day)s\">\n\t</label>\n\t<label>\n\t\tyear\n\t\t<input type=\"text\" name=\"year\" value=\"%(year)s\">\n\t</label>\n\t<div style=\"color: red\">%(error)s</div>\n\t<br><br>\n\t<input type=\"submit\">\n</form>\n'''\n\ndef make_form(error='', month='', day='', year=''):\n\treturn form % { 'error': escape(error),\n\t\t\t\t\t'month': escape(month),\n\t\t\t\t\t'day': escape(day),\n\t\t\t\t\t'year': escape(year)}\n"
},
{
"alpha_fraction": 0.6437054872512817,
"alphanum_fraction": 0.6722090244293213,
"avg_line_length": 20.049999237060547,
"blob_id": "787262f10b4d1c20d1694c52acefcfc53bca81cd",
"content_id": "4fb347b3dd4cc8d0cef7649d2d7b93e8279beeb4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 421,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 20,
"path": "/udacity/cs253/ProblemSet02/Quiz2_SignUp/Check.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import re\n\nre_username = re.compile(r'^[a-zA-Z0-9_-]{3,20}$')\nre_password = re.compile(r'^.{3,20}$')\nre_email = re.compile(r'^[\\S]+@[\\S]+.[\\S]+$')\n\ndef valid_username(username):\n\treturn re_username.match(username)\n\ndef valid_password(password):\n\treturn re_password.match(password)\n\ndef valid_verify(pd1, pd2):\n\treturn pd1 == pd2\n\ndef valid_email(email):\n\tif not email:\n\t\treturn True\n\telse:\n\t\treturn re_email.match(email)\n"
},
{
"alpha_fraction": 0.6311688423156738,
"alphanum_fraction": 0.6701298952102661,
"avg_line_length": 23.0625,
"blob_id": "10d9ff64f1768eb35bef782f65e7d35bda9e7bd5",
"content_id": "859da341f1f1a90449613c4689467f9e0ce5981f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 385,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 16,
"path": "/transport_layer/tcp/server_with_echo.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "# https://docs.python.org/3/library/socket.html#example\nimport socket\nserver_host = '0.0.0.0'\nport = 8888\n\nwith socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n\ts.bind((server_host, port))\n\ts.listen(1)\n\tconn, addr = s.accept()\n\twith conn:\n\t\tprint('Client: ', addr)\n\t\twhile True:\n\t\t\tdata = conn.recv(1024)\n\t\t\tif not data:\n\t\t\t\tbreak\n\t\t\tconn.sendall('received\\n'.encode('utf-8'))\n"
},
{
"alpha_fraction": 0.7042253613471985,
"alphanum_fraction": 0.7042253613471985,
"avg_line_length": 34.5,
"blob_id": "393178ebeb28c06307288bc437bc6cf12216d33d",
"content_id": "e751db9214c33705b05e151dabd8cafd41009a22",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 213,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 6,
"path": "/udacity/cs253/ProblemSet03/post.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "class Post(object):\n\tdef __init__(self, post_id, post_date, post_subject, post_content):\n\t\tself.post_id = post_id\n\t\tself.post_date = post_date\n\t\tself.post_subject = post_subject\n\t\tself.post_content = post_content\n"
},
{
"alpha_fraction": 0.630821943283081,
"alphanum_fraction": 0.6445205211639404,
"avg_line_length": 27.096153259277344,
"blob_id": "9a81d9b05521374f81676a67c86e47d45c1a7e80",
"content_id": "2a1dff645e8d9deb140d485152c299aa1eec9ae3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1460,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 52,
"path": "/transport_layer/udp/udp_c_linux/udp_client.c",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#include <unistd.h>\n#include <string.h>\n#include <sys/types.h>\n#include <sys/socket.h>\n#include <netinet/in.h>\n#include <netdb.h> /* netbd.h is needed for struct hostent =) */\n\n#define PORT 9000 /* Open Port on Remote Host */\n#define MAXDATASIZE 100 /* Max number of bytes of data */\n\nint main(int argc, char *argv[])\n{\n\tint fd; /* files descriptors */\n\tchar sendbuf[MAXDATASIZE];\n\tstruct hostent *he; /* structure that will get information about remote host */\n\tstruct sockaddr_in server,client; /* server's address information */\n\n\tif (argc !=2) { /* this is used because our program will need two argument (IP address and a message */\n\t\tprintf(\"Usage: %s \\n\",argv[0]);\n\t\texit(1);\n\t}\n\n\tif ((he=gethostbyname(argv[1]))==NULL){ /* calls gethostbyname() */\n\t\tprintf(\"gethostbyname() error\\n\");\n\t\texit(1);\n\t}\n\n\tif ((fd=socket(AF_INET, SOCK_DGRAM, 0))==-1){ /* calls socket() */\n\t\tprintf(\"socket() error\\n\");\n\t\texit(1);\n\t}\n\n\tbzero(&server,sizeof(server));\n\tserver.sin_family = AF_INET;\n\tserver.sin_port = htons(PORT); /* htons() is needed again */\n\tserver.sin_addr = *((struct in_addr *)he->h_addr); /*he->h_addr passes \"*he\"'s info to \"h_addr\" */\n\n\tsocklen_t len;\n\tlen=sizeof(struct sockaddr_in);\n\n\twhile (1) {\n\t\tprintf(\"input message:\");\n\t\tscanf(\"%s\", sendbuf);\n\t\tsendto(fd, sendbuf, strlen(sendbuf), 0, (struct sockaddr *)&server, len);\n\t\tif(sendbuf[0]=='q' && sendbuf[1]=='\\0')\n\t\t\tbreak;\n\t}\n\n\tclose(fd); /* close fd */\n}"
},
{
"alpha_fraction": 0.5046554803848267,
"alphanum_fraction": 0.517690896987915,
"avg_line_length": 18.214284896850586,
"blob_id": "ff43877efcdb6e6351208644b9b4c61f5ec10acd",
"content_id": "55386d5ebcb0d70cdc67066b8c7d8ac8d629e95b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 537,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 28,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/static/rot13/rot13.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "var reLow = /[a-mA-M]/;\nvar reHigh = /[n-zN-Z]/;\n\nvar app = new Vue({\n\tel: '#rot13',\n\tdata: {\n\t\ttext: ''\n\t},\n\tcomputed: {\n\t\tencoded: function (){\n\t\t\tvar newStr = [];\n\t\t\tvar len = this.text.length;\n\t\t\tfor(let i=0; i < len; i++ ){\n\t\t\t\tvar ch = this.text[i];\n\t\t\t\tif( reLow.test(ch) ){\n\t\t\t\t\tch = ch.charCodeAt() + 13;\n\t\t\t\t\tch = String.fromCharCode(ch);\n\t\t\t\t}else if( reHigh.test(ch) ){\n\t\t\t\t\tch = ch.charCodeAt() - 13;\n\t\t\t\t\tch = String.fromCharCode(ch);\n\t\t\t\t}\n\t\t\t\tnewStr.push(ch);\n\t\t\t}\n\t\t\tnewStr = newStr.join('');\n\t\t\treturn newStr\n\t\t}\n\t}\n})"
},
{
"alpha_fraction": 0.7739726305007935,
"alphanum_fraction": 0.7739726305007935,
"avg_line_length": 23.33333396911621,
"blob_id": "4d1f59e78a06d478aed8737725256b8cd9f29acd",
"content_id": "32f552aa3332828fb657174b11cff2a4bac4195d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 146,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 6,
"path": "/udacity/cs253/Lesson03_Databases/schema.sql",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "drop table if exists entries;\ncreate table entries (\n\tart_id integer primary key autoincrement,\n\tsubject text not null,\n\tcontent text not null\n);\n"
},
{
"alpha_fraction": 0.6438848972320557,
"alphanum_fraction": 0.6906474828720093,
"avg_line_length": 24.272727966308594,
"blob_id": "48e756a9cc7efb35053fabe832f0d7f428bb89fd",
"content_id": "a9f102c11dad7c7400636bb03053ccfcf08c7fad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 278,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 11,
"path": "/application_layer/http/simple_server/simple_server_2.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from wsgiref.simple_server import make_server\n\ndef application(environ, start):\n\tcontent = [b'<h1>Hello, world!</h1>']\n\tstart('200 OK', [('Content-Type', 'text/html')])\n\treturn content\n\nip = '0.0.0.0'\nport = 8000\nhttpd = make_server(ip, port, application)\nhttpd.serve_forever()\n"
},
{
"alpha_fraction": 0.7566371560096741,
"alphanum_fraction": 0.7876105904579163,
"avg_line_length": 49.22222137451172,
"blob_id": "bb5f2f9b7c7f55de70a929fc8ffe9db8509ed447",
"content_id": "b44e0a40d1d8d3d1ec29c9e90bbf1c1d61839ad3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 464,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 9,
"path": "/transport_layer/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "##\t其他参考链接\n\n*\t[Linuxnet, lab1 exercise](https://github.com/jJayyyyyyy/USTC-2018-Smester-1/tree/master/LinuxNetworkProgramming/lab1)\n\n*\t[Linuxnet, lab2 exercise](https://github.com/jJayyyyyyy/USTC-2018-Smester-1/tree/master/LinuxNetworkProgramming/lab2)\n\n*\t[http server and client](https://github.com/jJayyyyyyy/network/tree/master/application_layer/http)\n\n*\t[tcp/udp server and client](https://github.com/jJayyyyyyy/network/tree/master/transport_layer)\n"
},
{
"alpha_fraction": 0.718367338180542,
"alphanum_fraction": 0.718367338180542,
"avg_line_length": 23.600000381469727,
"blob_id": "6301af4277d6b3fb8baaa355b637ef9b4119912c",
"content_id": "1ed741cf30427b24b3dafc0b5dabf41a98551185",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 245,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 10,
"path": "/udacity/cs253/Integration/_01-04_/welcome.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\n\nclass WelcomeHandler(Page):\n\tfilename = 'welcome.html'\n\tdef get(self):\n\t\tusername = self.cookies().get('username')\n\t\tif username:\n\t\t\treturn self.render(self.filename, username=username)\n\t\telse:\n\t\t\treturn self.redirect('/')"
},
{
"alpha_fraction": 0.6245694756507874,
"alphanum_fraction": 0.6601607203483582,
"avg_line_length": 18.35555648803711,
"blob_id": "ab4e3c090df2bf47642dac9e7f395e75cf4db3fb",
"content_id": "84ee6a65ccc08da06b6ca1a8b890eb0ba74663c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 955,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 45,
"path": "/application_layer/http/simple_server/simple_server_1.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport socket, threading\n\nhttp_response = '''\\\nHTTP/1.1 200 OK\ncontent-type: text/html\\r\\n\\r\\n\\\n<h1>Hello world!</h1>\n'''\n\ndef get_socket():\n\ts = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\ts.bind(('0.0.0.0', 8080))\n\ts.listen(5)\n\tprint('Waiting for connection...')\n\treturn s\n\ndef tcp_link(sock, addr):\n\tprint('Accept new connection from %s:%s...' % addr)\n\tsock.send(http_response.encode('utf-8'))\n\tsock.close()\n\tprint('Connection from %s:%s closed.' % addr)\n\ndef main():\n\ts = get_socket()\n\twhile True:\n\t\tsock, addr = s.accept()\n\t\tt = threading.Thread(target=tcp_link, args=(sock, addr))\n\t\tt.start()\n\nmain()\n\n\n# 过程:\n#\n# 1. server监听ip:port\n# 2. 浏览器访问server的ip:port,建立socket连接\n# 3. server接受请求,建立连接\n# 4. 浏览器发起GET类型的http request\n# 5. server返回http response,内容包括\n#\t5.1 status line\n#\t5.2 headers\n#\t5.3 content\n# 6. 关闭socket\n"
},
{
"alpha_fraction": 0.6647531390190125,
"alphanum_fraction": 0.6716417670249939,
"avg_line_length": 25.393939971923828,
"blob_id": "ba7e302aff3b37e3ed8f6345c907f53a0a014b39",
"content_id": "c57bd27ab575d57c18326ae213fc88c3a3df6d14",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 871,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 33,
"path": "/udacity/cs253/Lesson02_-27-50-_FormsAndInput/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\nfrom flask import Flask, request, redirect, url_for\n# from flask import Response, Request\nimport CheckDate, Form\n\napp = Flask(__name__)\n\[email protected]('/', methods=['GET'])\ndef get():\n\treturn Form.make_form()\n\[email protected]('/', methods=['POST'])\ndef post():\n\tus_month = request.form['month']\n\tus_day = request.form['day']\n\tus_year = request.form['year']\n\n\ts_month = CheckDate.valid_month(us_month)\n\ts_day = CheckDate.valid_day(us_day)\n\ts_year = CheckDate.valid_year(us_year)\n\n\tif not ( s_month and s_day and s_year ):\n\t\treturn Form.make_form(error='Invalid Date', month=us_month, day=us_day, year=us_year)\n\telse:\n\t\treturn redirect(url_for('SuccessLoginHandler'))\n\[email protected]('/WelcomePage')\ndef SuccessLoginHandler():\n\treturn \"<div style=\\\"color: red\\\">Welcome!</div>\"\n\nif __name__ == '__main__':\n\tapp.run(port=8000, debug=True)\n"
},
{
"alpha_fraction": 0.7120419144630432,
"alphanum_fraction": 0.7434554696083069,
"avg_line_length": 10.29411792755127,
"blob_id": "661f523df3298a82426b93298889aeb006d01661",
"content_id": "fba7557e6bf10bb6350de0ced2c91c92433488b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 275,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 17,
"path": "/application_layer/http/simple_server/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "##\t过程:\n\n1.\tserver监听ip:port\n\n2.\t浏览器访问server的ip:port,建立socket连接\n\n3.\tserver接受请求,建立连接\n\n4.\t浏览器发起GET类型的http request\n\n5.\tserver返回http response,内容包括\n\n\t*\tstatus line\n\t*\theaders\n\t*\tcontent\n\n6.\t关闭socket"
},
{
"alpha_fraction": 0.6443349719047546,
"alphanum_fraction": 0.6463054418563843,
"avg_line_length": 18.150943756103516,
"blob_id": "cde49640cde1cb96f16f0463174535d3ebaaa9b0",
"content_id": "22b33391e2d1b800da203f0581a1452b0f9df4d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1015,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 53,
"path": "/udacity/cs253/Lesson03_Databases/test_db.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import sqlite3, logging\ndb_name='ascii_art.db'\n\ndef do_sql(sql, args=''):\n\tres = None\n\ttry:\n\t\tconn = sqlite3.connect(db_name)\n\t\tcur = conn.cursor()\n\t\tcur.execute(sql, args)\n\t\tres = cur.fetchall()\n\t\tconn.commit()\n\texcept Exception as e:\n\t\tlogging.exception(e)\n\tfinally:\n\t\tcur.close()\n\t\tconn.close()\n\treturn res\n\ndef insert(title, art):\n\tif title and art:\n\t\targs = (title, art)\n\t\tsql = 'insert into entries (title, art) values (?, ?)'\n\t\tdo_sql(sql, args)\n\ndef delete(id):\n\tif id and id.isdigit():\n\t\tsql = 'delete from entries where id=%s' % id\n\t\tdo_sql(sql)\n\ndef get_record_list():\n\tsql = 'select * from entries'\n\trecord_list = do_sql(sql)\n\treturn record_list\n\ndef init():\n\tsql = 'drop table if exists entries'\n\tdo_sql(sql)\n\tsql = 'create table entries (\\\n\t\t\t\tid integer primary key autoincrement,\\\n\t\t\t\ttitle text not null,\\\n\t\t\t\tart text not null)'\n\tdo_sql(sql)\n\ndef test():\n\t# init()\n\ttitle = 'hello'\n\tart = '^_^'\n\tinsert(title, art)\n\trecord_list = get_record_list()\n\tfor item in record_list:\n\t\tprint(*item)\n\ntest()\n"
},
{
"alpha_fraction": 0.4897959232330322,
"alphanum_fraction": 0.523809552192688,
"avg_line_length": 16.352941513061523,
"blob_id": "e7735442589e380588867e85e0faffbec8a9a6d8",
"content_id": "6a0b6b7fb0b023f47e4fe401345a2eaaa9c27b24",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 294,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 17,
"path": "/udacity/cs253/Lesson02a_Templates/FizzBuzz.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "def get(n):\n\tfizzbuzz = []\n\tif n and n.isdigit():\n\t\tn = int(n)\n\t\tif n > 30:\n\t\t\tn = 30\n\t\tfor i in range(1, n+1):\n\t\t\tif not i % 15:\n\t\t\t\titem = 'FizzBuzz'\n\t\t\telif not i % 3:\n\t\t\t\titem = 'Fizz'\n\t\t\telif not i % 5:\n\t\t\t\titem = 'Buzz'\n\t\t\telse:\n\t\t\t\titem = str(i)\n\t\t\tfizzbuzz.append(item)\n\treturn fizzbuzz"
},
{
"alpha_fraction": 0.7692307829856873,
"alphanum_fraction": 0.7692307829856873,
"avg_line_length": 22.450000762939453,
"blob_id": "fcbb8b5f528af50693052c78d3bf12f2aa940e19",
"content_id": "f42be2d2971bc4ec86180b763d13b48e392f0fc7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 468,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 20,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/db/schema.sql",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "drop table if exists posts;\ndrop table if exists artworks;\ndrop table if exists users;\n\ncreate table posts (\n\tid integer primary key autoincrement,\n\tupdate_date date not null,\n\tsubject text not null,\n\tcontent text not null);\n\ncreate table artworks (\n\tid integer primary key autoincrement,\n\tsubject text not null,\n\tcontent text not null);\n\ncreate table users (\n\tid integer primary key autoincrement,\n\tusername text not null unique,\n\tpw_hash text not null,\n\temail text);"
},
{
"alpha_fraction": 0.6941508054733276,
"alphanum_fraction": 0.7159971594810486,
"avg_line_length": 27.399999618530273,
"blob_id": "6dab9d71708e61ca89946c30b0958563d7725d5b",
"content_id": "f64ee449ca4d8d602f7759de64bcd48e435eeaa0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1793,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 50,
"path": "/website/index.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\nimport os.path\nfrom time import time\nfrom subprocess import call\n\nbasepath = 'static/index/db/%s'\nfilename_readhub = basepath % 'readhub.json'\nfilename_36kr = basepath % '36kr.json'\n\nclass IndexHandler(Page):\n\tfilename = 'index/index.html'\n\tdef get(self):\n\t\tself.update_json()\n\t\tif self.get_args('q') == 'json':\n\t\t\treturn self.q_json()\n\n\t\treturn self.render_raw(self.filename)\n\n\tdef update_json(self):\n\t\tlast_modified_readhub = os.path.getmtime(filename_readhub)\n\t\tlast_modified_36kr = os.path.getmtime(filename_36kr)\n\t\tnow = time()\n\t\texpires_readhub = now - last_modified_readhub > 3600\n\t\texpires_36kr = now - last_modified_36kr > 3600\n\n\t\t# 若本次请求时,距离上一次更新文件已经超过1h,则再次更新文件\n\t\t# 通过用户的请求来更新,而不是主动更新,节省资源\n\t\tif expires_readhub and expires_36kr:\n\t\t\tfilename = basepath % 'get_news.py'\n\t\t\tprint(filename)\n\t\t\tcall(['python3', filename])\n\t\n\tdef q_json(self):\n\t\t# 对于已经更新过的json数据\n\t\t# 由于客户端是通过axios(而不是浏览器)进行json请求的\n\t\t# 所以在服务端设置cache-control没有用\n\t\t# 服务端只在json_response中添加last-modified\n\t\t# 在客户端的app.js,进行逻辑判断,now - last-modified > 120s,则请求json,否则不发起请求\n\t\t\n\t\t# 取消,缓存的是css, js和png,是html里面固定了的东西\n\t\t# 每次刷新都会重新请求的是index.html\n\t\t# js没办法获取cache中的json,因此每次都必须重新请求json,否则填充的内容就是空的\n\t\t\n\t\tif self.get_args('src') == 'readhub':\n\t\t\twith open(filename_readhub) as f:\n\t\t\t\treturn self.json_response(data=f.read())\n\n\t\tif self.get_args('src') == '36kr':\n\t\t\twith open(filename_36kr) as f:\n\t\t\t\treturn self.json_response(data=f.read())"
},
{
"alpha_fraction": 0.623481810092926,
"alphanum_fraction": 0.6547136902809143,
"avg_line_length": 26,
"blob_id": "b330a5df1e1528bfa8c16919acb3b58660e2d6d8",
"content_id": "1d659e5b7370231c97a28ee0389735bf243ec22e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1783,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 64,
"path": "/website/static/index/db/get_news.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import requests, json, time\nimport logging\n\nsess = requests.Session()\nheaders = {\n\t'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',\n\t'Accept-Encoding': 'gzip, deflate',\n\t'Accept-Language': 'zh-CN,zh;q=0.9',\n\t'Host': '36kr.com',\n\t'Connection': 'keep-alive',\n\t'Cache-Control': 'no-cache',\n\t'Pragma': 'no-cache',\n\t'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36'\n}\n\ndef get_readhub():\n\turl = 'https://api.readhub.me/topic'\n\tparams = {'pageSize': '20'}\n\t# 前端,一次获取2页,每页20条\n\t# 后端,3小时更新一次,一次更新20条\n\ttry:\n\t\tresp = sess.get(url, headers=headers, params=params).text\n\t\tdata = json.loads(resp)['data']\n\t\tnewsList = []\n\t\tfor item in data:\n\t\t\tnews = {}\n\t\t\tnews['date'] = item['createdAt']\n\t\t\tnews['subject'] = item['title']\n\t\t\tnews['content'] = item['summary']\n\t\t\tnewsList.append(news)\n\t\tfilename = './static/index/db/readhub.json'\n\t\twith open(filename, 'w') as f:\n\t\t\tf.write(json.dumps(newsList, ensure_ascii=False))\n\texcept Exception as e:\n\t\tlogging.exception(e)\n\ndef get_36kr():\n\turl = 'https://36kr.com/api/newsflash'\n\tparams = {'per_page': '20'}\n\ttry:\n\t\tresp = sess.get(url, headers=headers, params=params).text\n\t\tdata = json.loads(resp)['data']['items']\n\t\tnewsList = []\n\t\tfor item in data:\n\t\t\tnews = {}\n\t\t\tnews['date'] = item['created_at']\n\t\t\tnews['subject'] = item['title']\n\t\t\tnews['content'] = item['description']\n\t\t\tnewsList.append(news)\n\t\tfilename = './static/index/db/36kr.json'\n\t\twith open(filename, 'w') as f:\n\t\t\tf.write(json.dumps(newsList, ensure_ascii=False))\n\texcept Exception as e:\n\t\tlogging.exception(e)\n\n\ndef update():\n\tget_readhub()\n\tget_36kr()\n\tprint('updated')\n\nupdate()\n\n# TODO crontab\n\n"
},
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.75,
"avg_line_length": 21.66666603088379,
"blob_id": "1b04238aeedf7c7c86824ebba2867a2d9615c3a2",
"content_id": "9206ba46559225d0898558ee4bf18e7e08f82ed5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 136,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 6,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/index.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\n\nclass IndexHandler(Page):\n\tfilename = 'index/index.html'\n\tdef get(self):\n\t\treturn self.render_raw(self.filename)\n"
},
{
"alpha_fraction": 0.6678082346916199,
"alphanum_fraction": 0.6678082346916199,
"avg_line_length": 23.33333396911621,
"blob_id": "c3b1f2303253b66905c075a8e5aed43d02f29d6d",
"content_id": "86bc6db8667cdca9a21b23f5cc9ff96e2bed650d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 292,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 12,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/nba.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\n\nclass NBAHandler(Page):\n\tfilename = 'nba/nba.html'\n\n\tdef get(self):\n\t\tif self.get_args('q') == 'json':\n\t\t\twith open('./static/nba/assets/team_list.json') as f:\n\t\t\t\tdata = f.read()\n\t\t\treturn self.json_response(data=data)\n\t\telse:\n\t\t\treturn self.render_raw(self.filename)\n"
},
{
"alpha_fraction": 0.6642156839370728,
"alphanum_fraction": 0.6642156839370728,
"avg_line_length": 26.266666412353516,
"blob_id": "2bea8b36fd8f37d6f712e76867ef0273cdd972b3",
"content_id": "f797f6c30f02dea19ea5c600e422900a99d6fb7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 408,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 15,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/welcome.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\n\nclass WelcomeHandler(Page):\n\tfilename = 'welcome/welcome.html'\n\tdef get(self):\n\t\tif self.check_valid_cookie():\n\t\t\tif self.get_args('q') == 'json':\n\t\t\t\tusername = self.cookies().get('username')\n\t\t\t\tuser = [{'username': username}]\n\t\t\t\treturn self.json_response(user)\n\t\t\telse:\n\t\t\t\treferer = self.get_referer()\n\t\t\t\treturn self.render_raw(self.filename)\n\t\telse:\n\t\t\treturn self.redirect('/')"
},
{
"alpha_fraction": 0.6329411864280701,
"alphanum_fraction": 0.6564705967903137,
"avg_line_length": 22.61111068725586,
"blob_id": "e3208e125869425e46a19d1cb25a8b48e635bc31",
"content_id": "7e4fd27b914abc554ca77e38f1528fde6feb8079",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 425,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 18,
"path": "/udacity/cs253/ProblemSet02/Quiz1_ROT13/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\nfrom flask import Flask, request, redirect, url_for\nimport ROT13, Page\n\napp = Flask(__name__)\n\[email protected]('/', methods=['GET'])\ndef GetRequestHandler():\n\treturn Page.make_page()\n\[email protected]('/', methods=['POST'])\ndef PostRequestHandler():\n\ttext = ROT13.encode( request.form['text'] )\n\treturn Page.make_page(text=text)\n\nif __name__ == '__main__':\n\tapp.run(port=8000, debug=True)\n"
},
{
"alpha_fraction": 0.6190000176429749,
"alphanum_fraction": 0.6230000257492065,
"avg_line_length": 21.727272033691406,
"blob_id": "0618a12fa342f8360f747830dda9a53abffb533f",
"content_id": "2cca7316e7c6aa09e964796c0629742635e23c95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1000,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 44,
"path": "/udacity/cs253/Integration/_01-02_/Page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import os\nfrom html import escape\n\ndef fill_template(name='index', **kw):\n\ttry:\n\t\tfilename = '%s.html' % name\n\t\tpath = os.path.join('.', 'templates', filename)\n\t\twith open(filename, 'r') as f:\n\t\t\tpage = f.read()\n\t\targs = dict( (key, escape(val)) for (key, val) in kw.items() )\n\t\treturn page % args\n\texcept:\n\t\treturn 'Page Not Found!'\n\ndef get_default_signup_args():\n\treturn {'username': '',\n\t\t\t'username_error': '',\n\t\t\t'password_error': '',\n\t\t\t'verify_error': '',\n\t\t\t'email': '',\n\t\t\t'email_error': ''}\n\ndef render_index():\n\tpage = fill_template('index')\n\treturn page\n\ndef render_rot13(text=''):\n\targs = {'text': text}\n\treturn fill_template('rot13', **args)\n\ndef render_signup(form={}):\n\tif form:\n\t\targs = form\n\telse:\n\t\targs = get_default_signup_args()\n\tprint(args)\n\treturn fill_template('signup', **args)\n\ndef render_welcome(username=''):\n\tif username:\n\t\targs = {'username': username, 'a': 'a'}\n\t\treturn fill_template('welcome', **args)\n\telse:\n\t\treturn 'Invalid username<br><br><a href=\"/\">Back</a>'\n"
},
{
"alpha_fraction": 0.6041666865348816,
"alphanum_fraction": 0.710349440574646,
"avg_line_length": 27.615385055541992,
"blob_id": "50800b86cf2f2359dc9789a775573df19dc90b31",
"content_id": "c25923ee46d68ec4719beb6f4df5733ba68b2466",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1768,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 52,
"path": "/application_layer/http/simple_client/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "##\tIntro\n\n*\t使用 `Python` 和 `C` 实现了一个简单的 `HTTP client`, 基本功能是通过 `TCP` 向 `server` 发出连接请求, `TCP` 携带的数据遵循 `HTTP` 格式, `request headers` 内容如下\n\n\t```\n\tGET / HTTP/1.1\\r\\nHost: github.com\\r\\nConnection: close\\r\\n\\r\\n\n\t```\n\n*\t仅用于网络编程学习和 `HTTP` 连接测试, `HTTPS` 需要另外处理\n\n*\t目标网站是 `github.com` (如果换成 `sina.com.cn`, 则会返回更多信息)\n\n\t`client` 发出 `http request` 访问 `http://github.com` 后, 如果网络正常, 会收到 `server` 发过来的 `http response`, 而且是一条 `302` 重定向信息, 其内容如下\n\n\t*\t未解码\n\n\t\t```\n\t\tb'HTTP/1.1 301 Moved Permanently\\r\\nContent-length: 0\\r\\nLocation: https://github.com/\\r\\nConnection: close\\r\\n\\r\\n'\n\t\t```\n\n\t*\t解码后\n\n\t\t```\n\t\tHTTP/1.1 301 Moved Permanently\n\t\tContent-length: 0\n\t\tLocation: https://github.com/\n\t\tConnection: close\n\n\n\t\t```\n\n\t我们把这个重定向信息(`http response`)打印出来, 然后退出程序, 测试完成\n\n\t<br>\n\n##\tReference\n\n*\t[TCP 编程](https://www.liaoxuefeng.com/wiki/0014316089557264a6b348958f449949df42a6d3a2e542c000/001432004374523e495f640612f4b08975398796939ec3c000)\n\n*\t[tcp client.c 1](https://blog.csdn.net/u012234115/article/details/54142273)\n\n*\t[tcp client.c 2](https://blog.csdn.net/u013377887/article/details/62429457)\n\n*\t[gethostbyname](http://man7.org/linux/man-pages/man3/gethostbyname.3.html)\n\n*\t[sockaddr_in](https://www.gta.ufrj.br/ensino/eel878/sockets/sockaddr_inman.html)\n\n*\t[AF_INET, IPv4](https://stackoverflow.com/questions/1593946/what-is-af-inet-and-why-do-i-need-it)\n\n*\t[HTTP - Messages](https://www.tutorialspoint.com/http/http_messages.htm)\n\n*\t[HTTP_headers](https://jjayyyyyyy.github.io/2017/05/01/HTTP_headers.html)\n"
},
{
"alpha_fraction": 0.6238145232200623,
"alphanum_fraction": 0.6338250637054443,
"avg_line_length": 21.069766998291016,
"blob_id": "e7b76fbe3a62d301872879997ff62d80c568545f",
"content_id": "8e86382de9c48bf28a66c91d48e806eef5a9fed4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1898,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 86,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/db/test_db.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import sqlite3, logging, datetime\ndb_name='database.db'\n\ndef do_sql(sql, args=''):\n\tres = None\n\ttry:\n\t\tconn = sqlite3.connect(db_name)\n\t\tconn.row_factory = sqlite3.Row\n\t\tcur = conn.cursor()\n\t\tcur.execute(sql, args)\n\t\tres = cur.fetchall()\n\t\tconn.commit()\n\texcept Exception as e:\n\t\tlogging.exception(e)\n\tfinally:\n\t\tcur.close()\n\t\tconn.close()\n\treturn res\n\ndef insert():\n\tsql = 'insert into posts (update_date, subject, content) values (?, ?, ?)'\n\tupdate_date = datetime.date.today()\n\tsubject = 'hello'\n\tcontent = 'world'\n\targs = (update_date, subject, content)\n\tres = do_sql(sql, args)\n\ndef insert_art():\n\tsql = 'insert into artworks (subject, content) values (?, ?)'\n\tsubject = 'hello'\n\tcontent = 'world'\n\targs = (subject, content)\n\tres = do_sql(sql, args)\n\ndef insert_user():\n\tsql = 'insert into users (username, pw_hash, email) values (?, ?, ?)'\n\targs = ('123', '123', '123')\n\tdo_sql(sql, args)\n\ndef get_record_list():\n\t# sql = 'select * from artworks'\n\t# sql = 'select * from users'\n\tsql = 'select * from posts'\n\trecord_list = do_sql(sql)\n\t# sql = 'select username from users where username=?'\n\t# args = ('1234', )\n\n\t# record_list = do_sql(sql, args)\n\treturn record_list\n\ndef update():\n\ttoday = post_date = datetime.date.today()\n\tprint(type(today))\n\tquery = 'update posts set post_date = ? 
where post_id = ?)'\n\targs = (today, '1')\n\t# do_sql(query, args)\n\ndef init():\n\tsql = 'drop table if exists users'\n\tdo_sql(sql)\n\tsql = 'create table users (\\\n\t\t\t\tuser_id integer primary key autoincrement,\\\n\t\t\t\tusername date not null unique,\\\n\t\t\t\tpw_hash text not null,\\\n\t\t\t\temail text)'\n\tdo_sql(sql)\n\n\ndef test():\n\t# init()\n\t# insert_art()\n\t# post_id = '1'\n\t\n\t# post_subject = \n\t# art = '^_^'\n\t# update()\n\t# insert_user()\n\trecord_list = get_record_list()\n\t# print(record_list[0]['username'])\n\tfor item in record_list:\n\t\tprint(*item)\n\t\t# print(dict(item).get('username'))\n\t# \tprint(item['username'])\n\t\n\ntest()\n"
},
{
"alpha_fraction": 0.7058823704719543,
"alphanum_fraction": 0.75,
"avg_line_length": 21.66666603088379,
"blob_id": "46dbf4682d8bd8066a57e643f699a39ecae4d7d2",
"content_id": "9142e41388b0d7246f1c5b878430f0cfcc311d9d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 136,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 6,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/rot13.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\n\nclass ROT13Handler(Page):\n\tfilename = 'rot13/rot13.html'\n\tdef get(self):\n\t\treturn self.render_raw(self.filename)\n"
},
{
"alpha_fraction": 0.6528623700141907,
"alphanum_fraction": 0.6571254730224609,
"avg_line_length": 25.918033599853516,
"blob_id": "19ebb7854b929056ea9fb1478b3d2964a71ce7a5",
"content_id": "d94a9402be8368b59082dac8ebfa2ff007d57393",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1738,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 61,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/artwork.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\nfrom database import Database\n\nclass Artwork(object):\n\tdef __init__(self, id='', subject='', content=''):\n\t\tself.id = id\n\t\tself.subject = subject\n\t\tself.content = content\n\nclass Record(Artwork):\n\tdef insert(self):\n\t\tquery = 'insert into artworks (subject, content) values (?, ?)'\n\t\targs = (self.subject, self.content)\n\t\treturn Database().query_db(query, args)\n\n\tdef update(self):\n\t\tquery = 'update artworks set subject = ?, content = ? where id = ?'\n\t\targs = (self.subject, self.content, self.id)\n\t\treturn Database().query_db(query, args)\n\t\t\n\tdef retrieve(self, id=0, limit=10):\n\t\tif id > 0:\n\t\t\tquery = 'select * from artworks where id = %d' % id\n\t\telse:\n\t\t\tquery = 'select * from artworks order by id desc limit %d' % limit\n\t\treturn Database().query_db(query)\n\t\n\tdef delete(self, id):\n\t\tquery = 'delete from artworks where id = ?'\n\t\targs = (id, )\n\t\tDatabase().query_db(query, args)\n\nclass AsciiArtHandler(Page):\n\tfilename = 'artwork/artwork.html'\n\n\tdef get(self):\n\t\tif self.get_args('q') == 'json':\n\t\t\trecord_list = Record().retrieve(limit=10)\n\t\t\treturn self.json_response(record_list)\n\t\telse:\n\t\t\treturn self.render_raw(self.filename)\n\n\tdef post(self):\n\t\tif self.check_valid_cookie():\n\t\t\tprint('logged')\n\t\t\tform = self.form()\n\t\t\tsubject = form.get('subject')\n\t\t\tcontent = form.get('content')\n\t\t\tif subject and content:\n\t\t\t\tid = form.get('id')\n\t\t\t\tif id:\n\t\t\t\t\tRecord(id, subject, content).update()\n\t\t\t\t\t# 不必重新读取数据库并返回json,而是直接告诉前端,刚才提交的数据是合理的,在前端直接用本地数据更新dom即可\n\t\t\t\t\treturn 'updated'\n\t\t\t\telse:\n\t\t\t\t\tRecord(0, subject, content).insert()\n\t\t\t\t\treturn 'inserted'\n\t\t\telse:\n\t\t\t\treturn 'invalid form'\n\t\telse:\n\t\t\treturn 'signin'\n"
},
{
"alpha_fraction": 0.6630434989929199,
"alphanum_fraction": 0.77173912525177,
"avg_line_length": 17.600000381469727,
"blob_id": "f7ff8cd6ed6f3d6b15fb280d427c2ce20eb9d70a",
"content_id": "c2e114ebebbd871f26b9cf4dc8ce3c44edb9c45a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 100,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 5,
"path": "/udacity/cs253/Lesson02_-27-50-_FormsAndInput/GAE/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "1.\tGAE使用的是python2.7\n\n2.\tdeploy script\n\ngcloud app deploy ./app.yaml --project abcdefg-190804"
},
{
"alpha_fraction": 0.6566265225410461,
"alphanum_fraction": 0.7168674468994141,
"avg_line_length": 24.538461685180664,
"blob_id": "cab6c2dc5921eb4fa91697ec02129670bfb8d8e9",
"content_id": "7d8a532bc97bd25627e933837d811bcee1885044",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 332,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 13,
"path": "/udacity/cs253/Integration/_01-04_/rot13.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from page import Page\nimport codecs\n\nclass ROT13Handler(Page):\n\tfilename = 'rot13.html'\n\tdef get(self):\n\t\treturn self.render(self.filename)\n\n\tdef post(self):\n\t\trot13_text = self.form().get('rot13_text')\n\t\tif rot13_text:\n\t\t\trot13_text = codecs.encode(rot13_text, ('rot13'))\n\t\treturn self.render(self.filename, rot13_text=rot13_text)\n"
},
{
"alpha_fraction": 0.6544811129570007,
"alphanum_fraction": 0.6591981053352356,
"avg_line_length": 19.682926177978516,
"blob_id": "72213e9a8448801af4ab4f5ecb75a175a034294b",
"content_id": "473b113fc874bdaf90c7458d59ba0afb80d3d641",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 848,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 41,
"path": "/udacity/cs253/Integration/_01-04_/database.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import sqlite3, logging\nDATABASE = 'db/database.db'\nSCHEMA = 'db/schema.sql'\n\ndef init_db():\n\ttry:\n\t\tconn = sqlite3.connect(DATABASE)\n\t\tcur = conn.cursor()\n\t\twith open(SCHEMA, 'r') as f:\n\t\t\tcur.executescript(f.read())\n\t\tconn.commit()\n\t\tprint('database initialized')\n\texcept Exception as e:\n\t\tprint('database failed')\n\t\tlogging.exception(e)\n\tfinally:\n\t\tcur.close()\n\t\tconn.close()\n\t\tprint('db closed')\n\nclass Database(object):\n\tdef __init__(self):\n\t\tpass\n\n\tdef query_db(self, query, args=()):\n\t\ttry:\n\t\t\tconn = sqlite3.connect(DATABASE)\n\t\t\tconn.row_factory = sqlite3.Row\n\t\t\tcur = conn.cursor()\n\t\t\tcur.execute(query, args)\n\t\t\trecord_list = cur.fetchall()\n\t\t\tconn.commit()\n\t\texcept Exception as e:\n\t\t\trecord_list = []\n\t\t\tlogging.exception(e)\n\t\t\tprint('query failed')\n\t\tfinally:\n\t\t\tcur.close()\n\t\t\tconn.close()\n\t\t\tprint('db closed')\n\t\t\treturn record_list\n"
},
{
"alpha_fraction": 0.6502857208251953,
"alphanum_fraction": 0.6571428775787354,
"avg_line_length": 29.701753616333008,
"blob_id": "4f8780e1d0fda81e2a87de1d14c3741555f91bef",
"content_id": "ba65304a82f7627a6dbd79de6f91f01e4a014b02",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1750,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 57,
"path": "/udacity/cs253/Integration/_01-03_/query.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "import datetime\n\nclass QueryArtwork():\n\tdef __init__(self):\n\t\tpass\n\n\tdef insert(self, subject, content):\n\t\tquery = 'insert into artworks (subject, content) values (?, ?)'\n\t\targs = (subject, content)\n\t\treturn (query, args)\n\n\tdef update(self, subject, content, artwork_id):\n\t\tquery = 'update artworks set subject = ?, content = ? where artwork_id = ?'\n\t\targs = (subject, content, str(artwork_id))\n\t\treturn (query, args)\n\t\t\n\tdef retrieve(self, select_one=True, artwork_id=1, limit=10):\n\t\tif select_one == True:\n\t\t\tquery = 'select * from artworks where artwork_id = %d' % artwork_id\n\t\telse:\n\t\t\tquery = 'select * from artworks order by artwork_id desc limit %d' % limit\n\t\targs = ()\n\t\treturn (query, args)\n\t\n\tdef delete(self, artwork_id):\n\t\tquery = 'delete from artworks where artwork_id = ?'\n\t\targs = artwork_id\n\t\treturn (query, args)\n\nclass QueryPost(object):\n\tdef __init__(self):\n\t\tpass\n\t\t\n\tdef insert(self, subject, content):\n\t\tquery = 'insert into posts (update_date, subject, content) values (?, ?, ?)' \n\t\tupdate_date = datetime.date(2018, 1, 1)\n\t\targs = (update_date, subject, content)\n\t\treturn (query, args)\n\n\tdef update(self, subject, content, post_id):\n\t\tquery = 'update posts set update_date = ?, subject = ?, content = ? where post_id = ?'\n\t\tupdate_date = datetime.date.today()\n\t\targs = (update_date, subject, content, str(post_id))\n\t\treturn (query, args)\n\n\tdef retrieve(self, select_one=True, post_id=1, limit=10):\n\t\tif select_one == True:\n\t\t\tquery = 'select * from posts where post_id = %d' % post_id\n\t\telse:\n\t\t\tquery = 'select * from posts order by post_id desc limit %d' % limit\n\t\targs = ()\n\t\treturn (query, args)\n\n\tdef delete(self, post_id):\n\t\tquery = 'delete from artworks where artwork_id = ?'\n\t\targs = artwork_id\n\t\treturn (query, args)\n"
},
{
"alpha_fraction": 0.7821229100227356,
"alphanum_fraction": 0.7821229100227356,
"avg_line_length": 24.571428298950195,
"blob_id": "d9252df8987dbb375bfda1523f99b0738e80b258",
"content_id": "a0f26270b154a126d8a7fd3111dd01bd96b62a85",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 221,
"license_type": "no_license",
"max_line_length": 142,
"num_lines": 7,
"path": "/vuejs/Getting_Started/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#### tips\n\n可以通过访问\n\n[https://jjayyyyyyy.github.io/network/vuejs/Getting_Started/index.html](https://jjayyyyyyy.github.io/network/vuejs/Getting_Started/index.html)\n\n来查看页面效果,而不只是纯文本\n"
},
{
"alpha_fraction": 0.6336996555328369,
"alphanum_fraction": 0.6434676647186279,
"avg_line_length": 23.81818199157715,
"blob_id": "2ab44bb50c2ac108bb297720271ef49b5b51ddb9",
"content_id": "367042fa421c6e12baa5ade6c9a36d9fefa94145",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2457,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 99,
"path": "/udacity/cs253/Integration/_01-03_/page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from flask import render_template\nimport DS\n\ndef fill_template(name='index', **kw):\n\tfilename = name + '.html'\n\treturn render_template(filename, **kw)\n\nclass Page(object):\n\tdef __init__(self):\n\t\tself.name = 'index'\n\n\tdef render(self, **kw):\n\t\treturn fill_template(name=self.name, **kw)\n\nclass Index(Page):\n\tdef __init__(self):\n\t\tself.name = 'index'\n\nclass ROT13(Page):\n\tdef __init__(self):\n\t\tself.name = 'rot13'\n\n\tdef render(self, rot13_text=''):\n\t\trot13_text = self.rotate(rot13_text)\n\t\treturn fill_template(name=self.name, rot13_text=rot13_text)\n\n\tdef rotate(self, text, res='', offset=13):\n\t\tfor ch in text:\n\t\t\tif ch.isalpha():\n\t\t\t\tif (ch >= 'a' and ch <= 'm') or (ch >= 'A' and ch <= 'M'):\n\t\t\t\t\tres = res + chr( ord(ch) + offset )\n\t\t\t\telse:\n\t\t\t\t\tres = res + chr( ord(ch) - offset )\n\t\t\telse:\n\t\t\t\tres = res + ch\n\t\treturn res\n\nclass FizzBuzz(Page):\n\tdef __init__(self, fizzbuzz_strn):\n\t\tself.name = 'fizzbuzz'\n\t\tself.fizzbuzz_list = []\n\t\tif fizzbuzz_strn and fizzbuzz_strn.isdigit():\n\t\t\tself.fizzbuzz_n = int(fizzbuzz_strn)\n\t\t\tself.get_fizzbuzz_list()\n\n\tdef render(self):\n\t\treturn fill_template(name=self.name, fizzbuzz_list=self.fizzbuzz_list)\n\n\tdef get_fizzbuzz_list(self, limit=30):\n\t\tif self.fizzbuzz_n > limit:\n\t\t\tself.fizzbuzz_n = limit\n\t\tfor i in range(1, self.fizzbuzz_n + 1):\n\t\t\tif not i % 15:\n\t\t\t\titem = 'FizzBuzz'\n\t\t\telif not i % 3:\n\t\t\t\titem = 'Fizz'\n\t\t\telif not i % 5:\n\t\t\t\titem = 'Buzz'\n\t\t\telse:\n\t\t\t\titem = str(i)\n\t\t\tself.fizzbuzz_list.append(item)\n\nclass Welcome(Page):\n\tdef __init__(self, username=''):\n\t\tself.name = 'welcome'\n\nclass AsciiArt(Page):\n\tdef __init__(self):\n\t\tself.name = 'ascii_art'\n\n\tdef render(self, record_list=[], **kw):\n\t\tartwork_list = []\n\t\tfor record in record_list:\n\t\t\tartwork = DS.Artwork(*record)\n\t\t\tartwork_list.append(artwork)\n\t\treturn 
fill_template(name=self.name, artwork_list=artwork_list, **kw)\n\nclass Blog(Page):\n\tdef __init__(self):\n\t\tself.name = 'blog'\n\n\tdef render_edit_post(self, record, **kw):\n\t\tpost = DS.Post(*record)\n\t\treturn fill_template(name='blog_edit_post', post=post, **kw)\n\n\tdef render_index(self, record_list=[]):\n\t\tpost_list = []\n\t\tfor record in record_list:\n\t\t\tpost = DS.Post(*record)\n\t\t\tpost_list.append(post)\n\t\treturn fill_template(name='blog_index', post_list=post_list)\n\n\tdef render_post(self, record):\n\t\tpost = DS.Post(*record)\n\t\tprint(post.subject)\n\t\treturn fill_template(name='blog_post', post=post)\n\n\tdef render_new_post(self, **kw):\n\t\treturn fill_template(name='blog_new_post', **kw)\n"
},
{
"alpha_fraction": 0.5841924548149109,
"alphanum_fraction": 0.6185566782951355,
"avg_line_length": 18.399999618530273,
"blob_id": "69da553485cee1a44e28b6481c0facdbee29ceba",
"content_id": "7e952263e1242b2d7998f7e4754b6e7b91fe1146",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 291,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 15,
"path": "/udacity/cs253/ProblemSet02/Quiz1_ROT13/Page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from html import escape\n\ntitle = '<h2>ROT13:</h2>'\n\nform = '''\\\n<form method=\"post\">\n\t<textarea name=\"text\" style=\"height: 100px; width: 400px;\">%(text)s</textarea>\n\t<br>\n\t<input type=\"submit\">\n</form>\n'''\n\ndef make_page(text=''):\n\tpage = title + form % { 'text': escape(text)}\n\treturn page\n"
},
{
"alpha_fraction": 0.6476595997810364,
"alphanum_fraction": 0.6697872281074524,
"avg_line_length": 25.08888816833496,
"blob_id": "d3472925d1dd18920f9b61f615def67f3d12907a",
"content_id": "e6b1516e649ae40a0b51c15a9f8876f4055ca347",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1175,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 45,
"path": "/udacity/cs253/Lesson02a_Templates/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\nfrom flask import Flask, request, redirect, url_for\nimport Page, Check\n\napp = Flask(__name__)\n\[email protected]('/fizzbuzz', methods=['GET'])\ndef get_fizzbuzz():\n\tn = request.args.get('n')\n\treturn Page.render_fizzbuzz(n)\n\[email protected]('/', methods=['GET'])\ndef get_index():\n\treturn Page.render_index()\n\[email protected]('/rot13', methods=['GET'])\ndef get_rot13():\n\treturn Page.render_rot13()\n\[email protected]('/rot13', methods=['POST'])\ndef post_rot13():\n\ttext = request.form.get('text')\n\treturn Page.render_rot13(text)\n\[email protected]('/signup', methods=['GET'])\ndef get_signup():\n\treturn Page.render_signup()\n\[email protected]('/signup', methods=['POST'])\ndef post_signup():\n\tchecked_form = Check.check_signup_form( request.form )\n\tif checked_form.get('valid') == True:\n\t\treturn redirect( url_for( 'get_welcome', username=checked_form.get('username') ) )\n\telse:\n\t\treturn Page.render_signup(checked_form)\n\[email protected]('/welcome', methods=['GET'])\ndef get_welcome():\n\tusername = request.args.get('username')\n\treturn Page.render_welcome(username)\n\nif __name__ == '__main__':\n\tapp.run(port=8000, debug=True)\n\t# app.run(port=8080, host='0.0.0.0', debug=False)\n\n"
},
{
"alpha_fraction": 0.6442603468894958,
"alphanum_fraction": 0.6539645195007324,
"avg_line_length": 27.54729652404785,
"blob_id": "2a72a3cb25bc7b05551c3ba9da124a4ba735ee98",
"content_id": "e1438c38c772b3b05b619aa4b7fcb6b338c4ad77",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4405,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 148,
"path": "/udacity/cs253/Integration/_01-03_/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from flask import Flask, request, redirect, url_for, g\nimport sqlite3, logging\nimport page, check\nfrom query import QueryArtwork, QueryPost\n\n# config\nDATABASE = 'db/database.db'\n\n# create app\t\napp = Flask(__name__)\n\n# Database\ndef get_conn():\n\tif not hasattr(g, 'sqlite3_conn'):\n\t\tconn = sqlite3.connect(DATABASE)\n\t\tconn.row_factory = sqlite3.Row\n\t\tg.sqlite3_conn = conn\n\treturn g.sqlite3_conn\n\n# $ export FLASK_APP=main\n# $ python3 -m flask initdb\[email protected]('initdb')\ndef init_db():\n\twith app.app_context():\n\t\tcur = get_conn().cursor()\n\t\twith app.open_resource('schema.sql', mode='r') as f:\n\t\t\tcur.executescript(f.read())\n\t\tcur.close()\n\t\tprint('database initialized')\n\[email protected]_appcontext\ndef close_conn(error):\n\tif hasattr(g, 'sqlite3_conn'):\n\t\tconn = g.sqlite3_conn\n\t\tconn.cursor().close()\n\t\tconn.commit()\n\t\tconn.close()\n\t\tprint('db closed')\n\ndef query_db(query, args=()):\n\tcur = get_conn().cursor()\n\tcur.execute(query, args)\n\treturn cur\n\[email protected]('/', methods=['GET'])\ndef get_index():\n\tindex = page.Index()\n\treturn index.render()\n\[email protected]('/rot13', methods=['GET'])\ndef get_rot13():\n\trot13 = page.ROT13()\n\treturn rot13.render()\n\[email protected]('/rot13', methods=['POST'])\ndef post_rot13():\n\trot13 = page.ROT13()\n\treturn rot13.render(request.form['rot13_text'])\n\[email protected]('/fizzbuzz', methods=['GET'])\ndef get_fizzbuzz():\n\tfizzbuzz = page.FizzBuzz( request.args.get('fizzbuzz_strn') )\n\treturn fizzbuzz.render()\n\[email protected]('/welcome', methods=['GET'])\ndef get_welcome():\n\tusername=request.args.get('username')\n\twelcome = page.Welcome()\n\treturn welcome.render(username=username)\n\n#############################################################\n# artwork\[email protected]('/ascii_art', methods=['GET'])\ndef get_ascii_art():\n\tquery = QueryArtwork().retrieve(select_one=False, limit=10)\n\trecord_list = 
query_db(*query).fetchall()\n\tascii_art = page.AsciiArt()\n\treturn ascii_art.render(record_list=record_list)\n\n# TODO: 在前端就进行检查,subject和content若为空,则不提交,而不是到了服务器再检查\[email protected]('/ascii_art', methods=['POST'])\ndef post_ascii_art():\n\tsubject = request.form.get('subject')\n\tcontent = request.form.get('content')\n\tif subject and content:\n\t\tartwork_id = request.form.get('artwork_id')\n\t\tif artwork_id:\n\t\t\tquery = QueryArtwork().update(subject, content, artwork_id)\n\t\telse:\n\t\t\tquery = QueryArtwork().insert(subject, content)\n\t\t\tprint('YES')\n\t\tquery_db(*query)\n\treturn redirect(url_for('get_ascii_art'))\n# end artwork\n#############################################################\n\n#############################################################\n# blogs and posts\[email protected]('/blog', methods=['GET'])\ndef get_blog_index():\n\tquery = QueryPost().retrieve(select_one=False, limit=10)\n\trecord_list = query_db(*query).fetchall()\n\tblog = page.Blog()\n\treturn blog.render_index(record_list=record_list)\n\[email protected]('/blog/edit/<int:post_id>', methods=['GET'])\ndef get_blog_edit(post_id):\n\tquery = QueryPost().retrieve(select_one=True, post_id=post_id)\n\trecord = query_db(*query).fetchall()[0]\n\tblog = page.Blog()\n\treturn blog.render_edit_post(record=record)\n\[email protected]('/blog/new_post', methods=['GET'])\ndef get_blog_new():\n\tblog = page.Blog()\n\treturn blog.render_new_post()\n\[email protected]('/blog/<int:post_id>', methods=['GET'])\ndef get_blog_post(post_id):\n\tquery = QueryPost().retrieve(select_one=True, post_id=post_id)\n\trecord = query_db(*query).fetchall()[0]\n\tblog = page.Blog()\n\treturn blog.render_post(record=record)\n\n# TODO: 在前端就进行检查,subject和content若为空,则不提交,而不是到了服务器再检查\[email protected]('/blog/edit/<int:post_id>', methods=['POST'])\ndef post_blog_edit(post_id):\n\tsubject = request.form.get('subject')\n\tcontent = request.form.get('content')\n\tif subject and content:\n\t\tquery = 
QueryPost().update(subject, content, post_id)\n\t\tquery_db(*query)\n\treturn redirect('/blog/%d' % post_id)\n\n# TODO: 在前端就进行检查,subject和content若为空,则不提交,而不是到了服务器再检查\[email protected]('/blog/new_post', methods=['POST'])\ndef post_blog_new():\n\tsubject = request.form.get('subject')\n\tcontent = request.form.get('content')\n\tif subject and content:\n\t\tquery = QueryPost().insert(subject, content)\n\t\tquery_db(*query)\n\treturn redirect('/blog')\n# end blogs and posts\n#############################################################\n\nif __name__ == '__main__':\n\tapp.run(port=8000, debug=True)\n"
},
{
"alpha_fraction": 0.6988543272018433,
"alphanum_fraction": 0.6988543272018433,
"avg_line_length": 23.479999542236328,
"blob_id": "02270bd2b048e64a2b399ebdd0547e3d01020977",
"content_id": "a4af6d2b57499e97235d5fc8b1d89597c149b831",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 611,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 25,
"path": "/udacity/cs253/ProblemSet03/page.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from flask import render_template\nfrom post import Post\n\ndef fill_template(name='blog', **kw):\n\tfilename = name + '.html'\n\treturn render_template(filename, **kw)\n\ndef render_blog(record_list=[]):\n\tpost_list = []\n\tfor record in record_list:\n\t\tpost = Post(*record)\n\t\tpost_list.append(post)\n\treturn fill_template(name='blog', post_list=post_list)\n\ndef render_post(record):\n\tpost = None\n\tif record:\n\t\tpost = Post(*record)\n\treturn fill_template(name='post', post=post)\n\ndef render_new_post(**kw):\n\treturn fill_template(name='new_post', **kw)\n\ndef render_edit_post(**kw):\n\treturn fill_template(name='edit_post', **kw)"
},
{
"alpha_fraction": 0.622193455696106,
"alphanum_fraction": 0.6291018724441528,
"avg_line_length": 24.450550079345703,
"blob_id": "1edaaafd106bd9033ab370c046d9775c79c4c58e",
"content_id": "1a66cd99ebf3861d7b569d584c09c7f0271fadd7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2316,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 91,
"path": "/udacity/cs253/Lesson03_Databases/main.py",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "from flask import Flask, request, redirect, url_for, g\nimport sqlite3\nimport page\napp = Flask(__name__)\n\n#################################################################################\nDATABASE = 'ascii_art.db'\n\ndef get_conn():\n\tif not hasattr(g, 'sqlite3_conn'):\n\t\tconn = sqlite3.connect(DATABASE)\n\t\tconn.row_factory = sqlite3.Row\n\t\tg.sqlite3_conn = conn\n\treturn g.sqlite3_conn\n\ndef init_db():\n\twith app.app_context():\n\t\tcur = get_conn().cursor()\n\t\twith app.open_resource('schema.sql', mode='r') as f:\n\t\t\tcur.executescript(f.read())\n\t\tcur.close()\n\[email protected]('initdb')\ndef initdb_command():\n\tinit_db()\n\tprint('database initialized')\n\[email protected]_appcontext\ndef close_connection(exception):\n\tif hasattr(g, 'sqlite3_conn'):\n\t\tconn = g.sqlite3_conn\n\t\tconn.cursor().close()\n\t\tconn.commit()\n\t\tconn.close()\n\t\tprint('connection closed')\n\ndef query_db(query, args=()):\n\tcur = get_conn().cursor()\n\tcur.execute(query, args)\n\treturn cur\n\ndef insert_record(args):\n\tif args and isinstance(args, tuple) and len(args) == 2:\n\t\tquery = 'insert into entries (subject, content) values (?, ?)'\n\t\tquery_db(query, args=args)\n\t\treturn True\n\telse:\n\t\treturn False\n\ndef update_record(args):\n\tif args and isinstance(args, tuple) and len(args) == 3:\n\t\tquery = 'update entries set subject = ?, content = ? 
where art_id = ?'\n\t\tquery_db(query, args)\n\t\treturn True\n\telse:\n\t\treturn False\n\ndef get_record_list(limit=10):\n\tquery = 'select * from entries order by art_id desc limit %d' % limit\n\tcur = query_db(query)\n\trecord_list = cur.fetchall()\n\treturn record_list\n#################################################################################\n\n\n\[email protected]('/', methods=['GET'])\ndef get_index():\n\treturn page.render_index()\n\[email protected]('/ascii_art', methods=['GET'])\ndef get_ascii_art():\n\trecord_list = get_record_list()\n\treturn page.render_art(record_list=record_list)\n\[email protected]('/ascii_art', methods=['POST'])\ndef post_ascii_art():\n\tsubject = request.form.get('subject')\n\tcontent = request.form.get('content')\n\tif subject and content:\n\t\tart_id = request.form.get('art_id')\n\t\tif art_id and art_id.isdigit():\n\t\t\targs = (subject, content, art_id)\n\t\t\tupdate_record(args)\n\t\telse:\n\t\t\targs = (subject, content)\n\t\t\tinsert_record(args)\n\treturn redirect(url_for('get_ascii_art'))\n\t\nif __name__ == '__main__':\n\tapp.run(port=8000, debug=True)\n"
},
{
"alpha_fraction": 0.6668380498886108,
"alphanum_fraction": 0.6791774034500122,
"avg_line_length": 11.54838752746582,
"blob_id": "31d9927c8008f5af37f913fa80d3b0d33eebfef5",
"content_id": "5dae49161bd59af03c97580b6c077e862d35e3a6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2945,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 155,
"path": "/udacity/cs253/Integration/_01-04_with_vuejs/static/nba/readme.md",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "Component template should contain exactly one root element.\n\n要把script给圈起来\nhttp://www.cnblogs.com/liziyou/p/6708537.html\n\n\n全局变量要放在最上面,否则\n\nfunc ref\n\nglobal var\n\nfunc def\n\n这样是没法引用到全局变量的\n\n\n##\tTODO: 增加radar chart\n\n*\tcomponent\n*\ttemplate\n*\t数据传递\n\n\nfilters似乎可以用methods里面定义一个function来实现其功能,比如改编一下文档里面的demo\n\n```html\n<div id=\"app\">\n\t<div v-bind:id=\"id | add1\"></div>\n\t<div v-bind:id=\"add2(id)\"></div>\n</div>\n```\n\n```javascript\nvar app = new Vue({\n\tel: '#app',\n\tdata: {\n\t\tid: 1\n\t},\n\tfilters:{\n\t\tadd1: function(id){\n\t\t\treturn id + 1\n\t\t}\n\t},\n\tmethods:{\n\t\tadd2: function(id){\n\t\t\treturn id + 2\n\t\t}\n\t}\n})\n```\n\n\nadd1和add2可以实现基本相同的功能。\n\n所以,vuejs中的filters有什么特别之处/优点?有那些场景只能使用filter?\n\n暂时想到的是,如果定义一个global的filter,那它可以被共用,不知道这个想法是否正确。如果正确,那么local的filters有哪些特别的用处吗?\n\n\nfailed to resolve directive\n\n时有时无\n\n根据console的提示找了半天不知道哪里错了\n\n最后发现 html 里面多了个 v-\n\n\n\n重新安排一下数据结构\n不再是conference --- region --- teamList\n\n而是直接给出teamList,把联盟和赛区,作为每个team的属性之一\n至于list的顺序,则按照原来的conference --- region --- teamList顺序\n\nteam = {\n\tteamListId: '',\t// 内部使用\n\tteamIdQq: '',\t// 与球队主页链接、logo图片链接有关\n\tname: '',\n\tconference: '',\n\tregion: '',\n\tstat: {},\n}\n\nteamList = [{team1}, {team2}, {team3}, ... 
]\n\n\nnba = {\n\tteamList: [],\n\teast: {\n\t\t'': [idList],\n\t\t'':\n\t},\n\twest: {\n\n\t}\n}\n\n\n\n\n总的 statList 保存为 全局变量,这样 app 的 主component 和 子component 之间,只需要传id,而不需要传递一个完整的 stat,这样有了id之后,就可以通过全局statList去索引指定id的stat\n\n\n\nNBA\n\n##\t页面内容:\n\n1.\t第一层\n\n\t大标题\n\n2.\t第二层\n\n\t左右两个table,东西联盟\n\n\t6个赛区\n\n\t每个赛区,队标+队名,队名链接指向 nba.stats.qq.com/team/?id=id\n\n\t鼠标移到第二层不同的队名时,前端会在第三层显示该队的数据排名\n\n3.\t第三层\n\n\tsvg,雷达图\n\n##\t数据\n\n*\t后端不使用db,只保存3个json文件,每天 crontab 定时用 update.py 更新\n\n\t通过py,得到 teams_raw.json 和 stats.json,提取数据,合成 teams.json\n\n\t回传给浏览器的只有 teams.json\n\n*\t浏览器请求页面时,返回模版nba.html\n\n\t通过html内的src,使浏览器再请求css, js\n\n\t通过js,请求 teams.json\n\n\t通过js,请求各队 logo ,文件名 ${id}.png\n\n##\t后端工作\n\n*\trouter/WSGI\n\n\t/nba ---> NBAHandler ---> get() ---> return render_raw(nba.html)\n\n\t/nba?q=json ---> NBAHandler ---> get() ---> return teams.json\n\n##\t前端工作\n\n*\t使用 vuejs 完成数据请求与页面填充,以及 reactive svg\n"
},
{
"alpha_fraction": 0.41059601306915283,
"alphanum_fraction": 0.4282560646533966,
"avg_line_length": 14.620689392089844,
"blob_id": "2db461856c937d04c97a4372fe5dea73ddd40b7c",
"content_id": "c3f7a9bc6c3485c0d5518c83a2f7dbbefb22be54",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 453,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 29,
"path": "/vuejs/demo/fizzbuzz/fizzbuzz.js",
"repo_name": "jJayyyyyyy/network",
"src_encoding": "UTF-8",
"text": "var app = new Vue({\n\tel: '#fizzbuzz',\n\tdata: {\n\t\tnum: null,\n\t\tFB: []\n\t},\n\tmethods: {\n\t\tgetFB: function(){\n\t\t\tvar FB = []\n\t\t\tvar fb = ''\n\t\t\tvar len = Number(this.num)\n\t\t\tfor(let i = 1; i <= len; i++){\n\t\t\t\tif (i % 15 === 0){\n\t\t\t\t\tfb = 'FizzBuzz'\n\t\t\t\t}else if(i % 3 === 0){\n\t\t\t\t\tfb = 'Fizz'\n\t\t\t\t}else if(i % 5 === 0){\n\t\t\t\t\tfb = 'Buzz'\n\t\t\t\t}else{\n\t\t\t\t\tfb = i\n\t\t\t\t}\n\t\t\t\tFB.push(fb)\n\t\t\t}\n\t\t\tthis.FB = FB\n\t\t\tthis.show = !this.show\n\t\t\tconsole.log(FB)\n\t\t}\n\t}\n})\n"
}
] | 91 |
arpius/scripts-utiles | https://github.com/arpius/scripts-utiles | 5ad2e7e07398450ad3c4e8d44b7281594602e525 | 83725ea34eb485e66c0c8ea5cef2f4afc1138af6 | 4c0c3659924fad367b4c42c13d7a51cd4c174cc9 | refs/heads/master | 2021-01-01T05:32:43.056677 | 2019-12-24T15:13:27 | 2019-12-24T15:13:27 | 24,338,565 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7513227462768555,
"alphanum_fraction": 0.7513227462768555,
"avg_line_length": 46.25,
"blob_id": "1afdce977b66df7cd4726fb59e3e92303ac3dc62",
"content_id": "16b13d465c979d382d70daf2387272f5d7f048ae",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 192,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 4,
"path": "/README.md",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "Scripts útiles\n==============\nRepositorio con una serie de scripts que he creado para realizar tareas en mi ordenador personal.\nSeguramente serán muy mejorables, pero a mi me son útiles :D\n"
},
{
"alpha_fraction": 0.6845753788948059,
"alphanum_fraction": 0.6863085031509399,
"avg_line_length": 25.227272033691406,
"blob_id": "4d40b53b06839a113ffa0bc6fceeb3e4e0759d93",
"content_id": "4e220773b8d0aa61925c205692beccf778252a8a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 579,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 22,
"path": "/volcar_archivo.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom sys import argv\nfrom os.path import exists\n\nscript, origen, destino = argv\n\nprint(\"Copiando de {} a {}.\".format(origen, destino))\n\narchivo_orig = open(origen).read()\n\nprint(\"El archivo tiene un tamaño de {} bytes\".format(len(archivo_orig)))\n\nif exists(destino):\n print(\"El archivo de destino [ {} ] ya existe. ¿Desea reemplazarlo?\".format_map(destino))\n print(\"Pulsa ENTER para continuar o CONTROL+C para cancelar...\")\n input()\n\narchivo_dest = open(destino, 'w').write(archivo_orig)\n\nprint(\"Proceso de volcado: OK\")\n"
},
{
"alpha_fraction": 0.46136048436164856,
"alphanum_fraction": 0.4919019043445587,
"avg_line_length": 27.4342098236084,
"blob_id": "741b64bf4bf58bf62263c3aee4f07630559da867",
"content_id": "77ba5bee106198488d3c9236f925bb07296cd0b6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2162,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 76,
"path": "/scp_interactivo.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Jul 23 11:59:08 2015\n\n@author: aritz\n\"\"\"\n# Script para transferir archivos entre 2 equipos mediante el comando scp.\n\nimport os\n\n\ndef menu():\n os.system('clear')\n\n titulo = 'scp interactivo'\n op1 = '1) Copiar archivo remoto a local.'\n op2 = '2) Copiar carpeta remota a local.'\n op3 = '3) Copiar archivo local a remoto.'\n op4 = '4) Copiar carpeta local a remoto.'\n salir = '5) Salir'\n\n menu = '{:-^34}\\n {}\\n {}\\n {}\\n {}\\n {}'.format(titulo, op1, op2, op3,\n op4, salir)\n print(menu)\n\n\ndef pedir_datos():\n usuario = input('Nombre de usuario: ')\n host = input('Host remoto: ')\n archivo = input('Nombre de archivo: ')\n destino = input('Ruta de destino: ')\n\n return usuario, host, archivo, destino\n\n\ndef ejecutar_scp(opcion):\n datos = {}\n datos = pedir_datos()\n\n if opcion == '1':\n # archivo remoto a local\n scp = 'scp -P 777 {}@{}:{} {}'.format(datos[0], datos[1], datos[2],\n datos[3])\n elif opcion == '2':\n # carpeta remota a local\n carpeta = input('Ruta a la carpeta remota: ')\n scp = 'scp -P 777 {}@{}:{} {}/{}'.format(datos[0], datos[1], carpeta,\n datos[3], datos[2])\n elif opcion == '3':\n # archivo local a remoto\n scp = 'scp -P 777 {} {}@{}:{}'.format(datos[2], datos[0], datos[1],\n datos[3])\n elif opcion == '4':\n # carpeta local a remoto\n scp = 'scp -P 777 -r {} {}@{}:{}'.format(datos[2], datos[0], datos[1],\n datos[3])\n\n print('Ejecutando {}... '.format(scp))\n os.system(scp)\n\nwhile True:\n menu()\n opcion = input('\\nElige una opción: ')\n\n if opcion == '1':\n ejecutar_scp(opcion)\n elif opcion == '2':\n ejecutar_scp(opcion)\n elif opcion == '3':\n ejecutar_scp(opcion)\n elif opcion == '4':\n ejecutar_scp(opcion)\n elif opcion == '5':\n print('Hasta otra o/')\n break\n"
},
{
"alpha_fraction": 0.5149398446083069,
"alphanum_fraction": 0.5277454257011414,
"avg_line_length": 24.514850616455078,
"blob_id": "8f6e65244d9406a0fda80670c68b9df3afad16bf",
"content_id": "95838a06f96cb2843577c4242efbeee06fffc3c2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2577,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 101,
"path": "/backup.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport subprocess\n\nrepository = 'sftp:gominola:/media/storage/backups'\n\n\ndef show():\n subprocess.run(\n ['restic', '-r', repository, 'snapshots'],\n check=True)\n\n\ndef backup(target):\n subprocess.run(\n ['restic', '-r', repository, 'backup', '--verbose', target],\n check=True)\n\n\ndef remove(target):\n subprocess.run(\n ['restic', '-r', repository, 'forget', target],\n check=True)\n\n\ndef prune(copies=1):\n subprocess.run(\n ['restic', '-r', repository, 'forget',\n '--keep-last={}'.format(copies), '--prune'],\n check=True)\n\n\ndef restore(snapshot, target):\n subprocess.run(\n ['restic', '-r', repository, 'restore', snapshot, '--target', target],\n check=True)\n\n\ndef unlock():\n subprocess.run(\n ['restic', '-r', repository, 'unlock'],\n check=True)\n\n\ndef menu():\n subprocess.run('clear')\n\n title = ' Backing up with Restic '\n op1 = '1) List snapshots.'\n op2 = '2) Make a backup.'\n op3 = '3) Delete backup.'\n op4 = '4) Reduce the amount of snapshots.'\n op5 = '5) Restore from a snapshot.'\n op6 = '6) Unlock the repository.'\n op7 = '7) Exit.'\n\n menu = '{:-^42}\\n {}\\n {}\\n {}\\n {}\\n {}\\n {}\\n {}'.format(\n title, op1, op2, op3, op4, op5, op6, op7)\n\n print(menu)\n\n\ndef main():\n while True:\n menu()\n option = input(\"\\nChoose an option: \")\n continue_text = \"Press the <ENTER> key to continue...\"\n\n if option == '1':\n show()\n input(continue_text)\n elif option == '2':\n target = input(\"Insert backup's target: \")\n backup(target=target)\n input(continue_text)\n elif option == '3':\n target = input(\"Insert the ID of the snapshot to be deleted: \")\n remove(target=target)\n input(continue_text)\n elif option == '4':\n copies = input(\"Insert the number of copies to be kept: \")\n prune(copies=copies)\n input(continue_text)\n elif option == '5':\n snapshot = input(\"Insert the ID of the snapshot to be restored: \")\n target = input(\"Insert where it will be restored: 
\")\n restore(snapshot=snapshot, target=target)\n input(continue_text)\n elif option == '6':\n unlock()\n input(continue_text)\n elif option == '7':\n print(\"See you!\")\n break\n else:\n print(\"Incorrect option!\")\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.5765853524208069,
"alphanum_fraction": 0.585853636264801,
"avg_line_length": 25.623376846313477,
"blob_id": "19f2102e3dd118bde93b9218a34159119ffabf88",
"content_id": "3a8df926bbf85d75001e489a5c544f6b967da309",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2057,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 77,
"path": "/conversor_ebooks.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# Script para convertir libros electrónicos a otro formato.\n# Calibre debe estar instalado.\n\nimport subprocess\nimport os\n\n\ndef menu():\n subprocess.call(\"clear\")\n\n titulo = ' Menú '\n op1 = '1) Listar el contenido de la carpeta.'\n op2 = '2) Sustituir espacios en blanco por guiones bajos.'\n op3 = '3) Convertir ebook a otro formato.'\n op4 = '4) Salir.'\n\n menu = '{:-^52}\\n {}\\n {}\\n {}\\n {}'.format(titulo, op1, op2, op3, op4)\n\n print(menu)\n\n\ndef listar():\n subprocess.call([\"ls\", \"-lh\"])\n input(\"\\nPulsa intro para continuar...\")\n\n\ndef renombrar():\n files = os.listdir() # listamos el contenido del directorio\n\n for file in files:\n # sustituimos los posibles espacios en blanco por guiones bajos y\n # renombramos el archivo\n os.rename(file, \"_\".join(file.split()))\n\n\ndef convertir(origen, destino):\n files = os.listdir()\n\n for file in files:\n if os.path.isfile(file): # si es un archivo\n if file.endswith(origen) is True: # si la extension es 'origen'\n print(\"{} se convertirá a {}\".format(file, destino))\n\n # usamos calibre para convertir a otro formato\n subprocess.call([\"ebook-convert\", file, \"{}.{}\".format(file, destino)])\n\n\ndef corregir(origen, destino):\n files = os.listdir()\n\n for file in files:\n if os.path.isfile(file):\n if file.endswith(destino) is True:\n # eliminamos la extensión antigua del nombre final\n os.rename(file, file.replace(\".{}.\".format(origen), \".\"))\n\nwhile True:\n menu()\n opcion = input(\"\\nElige una opción: \")\n\n if opcion == \"1\":\n listar()\n elif opcion == \"2\":\n renombrar()\n elif opcion == \"3\":\n origen = input(\"Formato de origen: \")\n destino = input(\"Formato de destino: \")\n convertir(origen, destino)\n corregir(origen, destino)\n elif opcion == \"4\":\n print(\"Aguuur\")\n break\n else:\n print(\"Opción no válida\")\n"
},
{
"alpha_fraction": 0.6991150379180908,
"alphanum_fraction": 0.7018805146217346,
"avg_line_length": 25.58823585510254,
"blob_id": "697ba7b3900f7bf44b62f658752d2e7865f58851",
"content_id": "28315192326be57ff04204d6d2c1463b3cba96fe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1813,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 68,
"path": "/correo/enviar_correo.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nScript para enviar correo por consola.\n\"\"\"\nimport smtplib\nimport yaml\nfrom datetime import datetime\nfrom email import encoders\nfrom email.mime.base import MIMEBase\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\n\n\nwith open('config/config.yml', 'r') as config:\n try:\n configuracion = yaml.load(config)\n except yaml.YAMLError as error:\n print(error)\n\n\n# Parámetros de configuración para el mensaje de correo\nEMISOR = configuracion['correo']['emisor']\nRECEPTOR = configuracion['correo']['receptor']\nASUNTO = configuracion['correo']['asunto']\nMENSAJE = configuracion['correo']['mensaje']\nADJUNTO = configuracion['correo']['adjunto']\n\n# Parámetros de configuración del servidor de correo\nSMTP = configuracion['servidor']['saliente']\nPUERTO = configuracion['servidor']['puerto']\nCLAVE = configuracion['servidor']['clave']\n\n\ncorreo = MIMEMultipart()\ncorreo['From'] = EMISOR\ncorreo['To'] = ','.join(RECEPTOR)\ncorreo['Subject'] = ASUNTO\ncorreo.attach(MIMEText(MENSAJE, 'plain'))\n\n\nfor adj in ADJUNTO:\n archivo = open(adj, \"rb\")\n\n adjunto = MIMEBase('application', 'octet-stream')\n adjunto.set_payload(archivo.read())\n encoders.encode_base64(adjunto)\n adjunto.add_header('Content-Disposition', \"attachment; filename={}\".format(\n adj))\n\n correo.attach(adjunto)\n\n\nservidor = smtplib.SMTP(SMTP, PUERTO)\nservidor.starttls()\nservidor.login(EMISOR, CLAVE)\n\ntexto = correo.as_string()\n\nservidor.sendmail(EMISOR, RECEPTOR, texto)\nservidor.quit()\n\nprint('{:-^42}'.format(' Correo electrónico '))\nprint('[Enviado por] {}'.format(EMISOR))\nprint('[Recibido por] {}'.format(RECEPTOR))\nprint('[Fecha] {:%Y-%m-%d %H:%M}'.format(datetime.now()))\nprint('[Adjuntos] {}'.format(ADJUNTO))\n"
},
{
"alpha_fraction": 0.5702970027923584,
"alphanum_fraction": 0.6376237869262695,
"avg_line_length": 24.25,
"blob_id": "5199e7e6a4759fa22922f730f7cc71fc462f3e6c",
"content_id": "90ab5989530cd48c8ab9bcd3cda40b9c3028ced4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 505,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 20,
"path": "/hosts_en_pie.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport nmap\n\nnm = nmap.PortScanner()\nnodos = input('IP o rango de IPs a escanear: ')\n\nwhile len(nodos) == 0:\n nodos = input('IP o rango de IPs a escanear: ')\n\nnm.scan(hosts=nodos, arguments='-n -sP -PE -PA21,22,23,53,80,137,138,139,443,445,631,3389')\nlistado = [(x, nm[x]['status']['state']) for x in nm.all_hosts()]\narchivo = open('escaneado.txt', 'w')\n\nfor nodo, estado in listado:\n print(nodo, estado)\n archivo.write(nodo+ '\\n')\n\narchivo.close()\n"
},
{
"alpha_fraction": 0.6880131363868713,
"alphanum_fraction": 0.7011494040489197,
"avg_line_length": 24.375,
"blob_id": "1b618a11f535fc697d2ab0e248a0127849bc8cae",
"content_id": "d2ade73717f92b29d20a263d39554334d4ce5c06",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 612,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 24,
"path": "/generador_clave.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\n\"\"\"\nScript que genera una clave aleatoria que contiene letras mayúsculas,\nminúsculas, números y signos.\nPor defecto la longitud de la clave es de 12 caracteres.\n\"\"\"\n\nfrom random import choice\nimport string\nimport click\n\n\[email protected]()\[email protected]('-l', '--longitud', default=12, help='Longitud de la clave.')\ndef generador_clave(longitud=12):\n caracteres = string.ascii_letters + string.digits + string.punctuation\n clave = ''.join(choice(caracteres) for i in range(longitud))\n click.echo(clave)\n\n\nif __name__ == \"__main__\":\n generador_clave()\n"
},
{
"alpha_fraction": 0.739130437374115,
"alphanum_fraction": 0.739130437374115,
"avg_line_length": 25.428571701049805,
"blob_id": "176162b61c3cd89b8f52acfbc2469f73cc54df07",
"content_id": "ea8f5e0784c5967cd2809ea2e604aa65669eaf02",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 184,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 7,
"path": "/actualizar_raspbian.sh",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n# Script para actualizar los repositorios de software\n# y el firmware de la raspberry Pi\n\nsudo aptitude update -y && \\\nsudo aptitude upgrade -y && \\\nsudo rpi-update"
},
{
"alpha_fraction": 0.7190332412719727,
"alphanum_fraction": 0.7341389656066895,
"avg_line_length": 32.099998474121094,
"blob_id": "9df1f9b4235a60458f19e2b58feeb4afc044f74f",
"content_id": "e44d80c0b92b7ed7f9356cf3504758f53835540f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 669,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 20,
"path": "/resize_imgs.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nScript que redimensiona imágenes para subirlas a la web o mandar por correo.\nImagemagick debe estar instalado.\nmogrify -resize ANCHOxALTO [-quality CALIDAD] ruta_de_las_imagenes/*.EXTENSIÓN\n\"\"\"\n\nimport os\nimport subprocess\n\nprint(\"Introduce la ruta completa de la imágen que quieres redimensionar\")\nprint(\"En caso de que quieras redimensionar todas las imágenes de un mismo\"\n \"tipo escribe: ruta_imagenes/*.extensión\")\nprint(\"¡OJOCUIDAO este script SOBRESCRIBE la imagen original!\")\n\nruta_img = input(\"Ruta de la imágen: \")\n\nsubprocess.call([\"mogrify\", \"-resize\", \"1024x768\", \"-quality\", \"94%\", ruta_img])\n"
},
{
"alpha_fraction": 0.5669882297515869,
"alphanum_fraction": 0.5691318511962891,
"avg_line_length": 24.91666603088379,
"blob_id": "22a2abd5466ea934a861539300cb39c4c0e5d084",
"content_id": "d22e18e63651f3e1ffaa922226f298543d88ef46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 933,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 36,
"path": "/space_replacement.py",
"repo_name": "arpius/scripts-utiles",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport argparse\nimport os\nimport sys\n\n\nparser = argparse.ArgumentParser(prog='space_replacement',\n usage='%(prog)s path replacement',\n description='Replaces spaces in a file name.')\n\nparser.add_argument('Path',\n metavar='path',\n type=str,\n help='the files path')\nparser.add_argument('Replacement',\n metavar='replacement',\n type=str,\n help='the replacement character')\n\nargs = parser.parse_args()\npath = args.Path\nreplacement = args.Replacement\n\nif not os.path.isdir(path):\n print(\"The specified path does not exist.\")\n sys.exit()\n\nfiles = os.listdir(path)\n\nfor file in files:\n old_name = os.path.join(path, file)\n new_name = replacement.join(old_name.split())\n\n os.rename(old_name, new_name)\n"
}
] | 11 |
mhelwig/check_headers | https://github.com/mhelwig/check_headers | 6fa78881343a3ed471c5cf67d658ce05e05ef2ea | 4227e5cef28000d9597c1d0e118e816fbb1c4279 | eca60745005db1e199a4e2448898279d963c002c | refs/heads/master | 2021-01-21T14:35:49.430793 | 2017-06-27T11:23:44 | 2017-06-27T11:23:44 | 95,310,136 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7327262759208679,
"alphanum_fraction": 0.7423674464225769,
"avg_line_length": 28.634920120239258,
"blob_id": "ac56ef02797193bce6030d5d5f9fad1ae97a86cf",
"content_id": "6f4b0e275df5d0e8e4ed46b6e509cd1c9bc0132f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1867,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 63,
"path": "/README.md",
"repo_name": "mhelwig/check_headers",
"src_encoding": "UTF-8",
"text": "# check_headers\nSimple script to check for expected header values on multiple domains\n\n## Usage\nAdjust config settings and run with python\n```\npython check_headers.py\n```\n\n\n## Expected output\nSomething like this:\n```\n[URL] https://www.google.com\n [INFO] Following redirect to https://www.google.de/?gfe_rd=cr&ei=4JZOWdu6LrTb8Afp15-wAw\n [OK] x-frame-options: SAMEORIGIN\n [ERROR] Missing strict-transport-security\n [OK] x-xss-protection: 1; mode=block\n [ERROR] Missing x-content-type-options\n\n[URL] http://www.google.com\n [INFO] Following redirect to http://www.google.de/?gfe_rd=cr&ei=4JZOWdjZPK7b8Afht4-4DQ\n [OK] x-frame-options: SAMEORIGIN\n [ERROR] Missing strict-transport-security\n [OK] x-xss-protection: 1; mode=block\n [ERROR] Missing x-content-type-options\n\n[URL] https://www.google.de\n [OK] x-frame-options: SAMEORIGIN\n [ERROR] Missing strict-transport-security\n [OK] x-xss-protection: 1; mode=block\n [ERROR] Missing x-content-type-options\n\n[URL] http://www.google.de\n [OK] x-frame-options: SAMEORIGIN\n [ERROR] Missing strict-transport-security\n [OK] x-xss-protection: 1; mode=block\n [ERROR] Missing x-content-type-options\n\n[URL] https://www.google.fr\n [OK] x-frame-options: SAMEORIGIN\n [ERROR] Missing strict-transport-security\n [OK] x-xss-protection: 1; mode=block\n [ERROR] Missing x-content-type-options\n\n[URL] http://www.google.fr\n [OK] x-frame-options: SAMEORIGIN\n [ERROR] Missing strict-transport-security\n [OK] x-xss-protection: 1; mode=block\n [ERROR] Missing x-content-type-options\n\n[URL] https://www.google.it\n [OK] x-frame-options: SAMEORIGIN\n [ERROR] Missing strict-transport-security\n [OK] x-xss-protection: 1; mode=block\n [ERROR] Missing x-content-type-options\n\n[URL] http://www.google.it\n [OK] x-frame-options: SAMEORIGIN\n [ERROR] Missing strict-transport-security\n [OK] x-xss-protection: 1; mode=block\n [ERROR] Missing x-content-type-options\n```\n"
},
{
"alpha_fraction": 0.5632690787315369,
"alphanum_fraction": 0.5709310173988342,
"avg_line_length": 27.706666946411133,
"blob_id": "46878af48c99f7b4fc4f5aeb1b6b02bd4ae3b78e",
"content_id": "86e1f8218811f77cf57b8d339b936d584602517c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4307,
"license_type": "no_license",
"max_line_length": 232,
"num_lines": 150,
"path": "/check_headers.py",
"repo_name": "mhelwig/check_headers",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\nimport urllib2\nimport re\n\n'''\ncheck_headers.py\n\nChecking http headers for given values\n\nUsage: python check_headers.py\n\nauthor: Michael Helwig (@c0dmtr1x)\nlicense: LGPLv3\n\nAdjust config settings before use\n\n'''\n\n##########\n# config #\n##########\n\n#user-agent\nuseragent = \"Mozilla/5.0\"\n\n#protocol\nprotocols = [\"https\",\"http\"]\n\n# list your domains\ndomains = [\n \"www.google.com\", \n \"www.google.de\", \n \"www.google.fr\", \n \"www.google.it\",\n]\n\n\n# expected headers\nexpected = {\n # check for multiple regex matches\n \"set-cookie\" : [\n { \n \"contains\":[\"HttpOnly\"],\n \"required\": True\n },\n {\n \"contains\":[\"Secure\"],\n \"required\": {\n \"protocol\":[\"https\"]\n }\n \n }\n ],\n\n \"strict-transport-security\": {\n \"contains\":\"max-age=15552000\",\n \"required\":{\n \"protocol\":[\"https\"]\n }\n },\n \"x-xss-protection\":{\n \"value\":\"1; mode=block\",\n \"required\":True\n },\n \"x-content-type-options\":{\n \"value\":\"nosniff\",\n \"required\":True\n },\n \"x-frame-options\":{\n \"value\":\"SAMEORIGIN\",\n \"required\":True\n },\n \"dummy\":{\n \"contains\":\"1\",\n \"required\":False\n }\n}\n\n#timeout for url requests\ntimeout = 5\n\n#follow redirects or not\nfollow_redirects = True\n\n#################\n# header check #\n#################\n\nclass HeaderRedirectHandler(urllib2.HTTPRedirectHandler):\n global follow_redirects\n def http_error_301(self, req, fp, code, msg, headers):\n if not follow_redirects:\n print \" [ERROR] Not following redirect to \" + headers[\"location\"]\n return False\n else:\n print \" [INFO] Following redirect to \" + headers[\"location\"]\n result = urllib2.HTTPRedirectHandler.http_error_301(\n self, req, fp, code, msg, headers)\n result.status = code\n return result\n\n def http_error_302(self, req, fp, code, msg, headers):\n if not follow_redirects:\n print \" [ERROR] Not following redirect to \" + headers[\"location\"]\n return False\n else: \n 
print \" [INFO] Following redirect to \" + headers[\"location\"]\n result = urllib2.HTTPRedirectHandler.http_error_302(\n self, req, fp, code, msg, headers) \n result.status = code \n return result\n\ndef check_header(info,key,expected_entry,protocol):\n if \"value\" in expected_entry.keys() and expected_entry[\"value\"] == info[key]:\n print \" [OK] \" + key + \": \" + info[key]\n elif \"value\" in expected_entry.keys() and (expected_entry[\"required\"] == True or (isinstance(expected_entry[\"required\"],dict) and \"protocol\" in expected_entry[\"required\"] and protocol in expected_entry[\"required\"][\"protocol\"])):\n print \" [ERROR] Unexpected \" + key + \": \" + info[key]\n if \"contains\" in expected_entry.keys():\n for contains_entry in expected_entry[\"contains\"]:\n if re.match(info[key],contains_entry):\n\t print \" [OK] \" + key +\" matches \" + contains_entry\n elif expected_entry[\"required\"] == True or isinstance(expected_entry[\"required\"],dict) and \"protocol\" in expected_entry[\"required\"] and protocol in expected_entry[\"required\"][\"protocol\"]:\n\t print \" [ERROR] \" + key + \" does not match \" + contains_entry\n\nfor domain in domains:\n for protocol in protocols:\n\turl = protocol + \"://\" + domain\n\tprint \"\\n[URL] \"+ url\n\ttry:\n\t opener = urllib2.build_opener(HeaderRedirectHandler)\n\t opener.addheaders = [('User-Agent', useragent)]\n\t response = opener.open(url,timeout=timeout)\n\t info = response.info()\n\t for key in expected.keys():\n\t\t if key in info:\n if isinstance(expected[key],dict):\n check_header(info,key,expected.get(key),protocol)\n elif isinstance(expected[key],list):\n for entry in expected.get(key):\n check_header(info,key,entry,protocol)\n\t\t else:\n\t\t if expected.get(key)[\"required\"]:\n\t\t\t print \" [ERROR] Missing \" + key\n\n\texcept IOError, e:\n\t if hasattr(e,'code'):\n\t\tprint \" [ERROR] Host responded with status \" + str(e.code)\n\t else:\n\t\tprint \" [ERROR] Could not retrieve url. 
Skipped.\"\n\n"
}
] | 2 |
oliver-contier/somato_align | https://github.com/oliver-contier/somato_align | 404c7c95ddaa3ffe467d4970e1b3d7597a6317c4 | ad69c150ecff4e0c5f9f66bd9a16c1cd050c001f | 54d2a8c0f9599028e4e487290cc120d5235af240 | refs/heads/master | 2020-08-02T10:38:12.959367 | 2020-06-15T15:05:42 | 2020-06-15T15:05:42 | 211,320,128 | 1 | 1 | null | null | null | null | null | [
{
"alpha_fraction": 0.6028881072998047,
"alphanum_fraction": 0.6131835579872131,
"avg_line_length": 50.9375,
"blob_id": "8edeceae1531622115a8ef5cd2c536b3342c2011",
"content_id": "a6177d1cbf169beac2f8a2036e77a21c3758a401",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7479,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 144,
"path": "/srm/crossval_projection.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport runpy\nimport time\nimport os\nfrom os.path import join as pjoin\n\nimport numpy as np\nfrom brainiak.funcalign.rsrm import RSRM\n\n# import io functions from my other srm script\nfile_globals = runpy.run_path('srm_roi.py')\ndatagrabber = file_globals['datagrabber']\nload_data = file_globals['load_data']\ngrab_subject_ids = file_globals['grab_subject_ids']\n\nfile_globals2 = runpy.run_path('decode_random_stimulation.py')\nload_data_and_labels = file_globals2['load_data_and_labels']\n\n\ndef project_data_crossval_periodiconly(run1_arrs,\n run2_arrs,\n k=10,\n niter=20,\n outdir='/data/BnB_USER/oliver/somato/scratch/crossval_projection'):\n \"\"\"\n Project subjects bold data onto shared response space in a cross-validated way. For every subject, an srm is\n trained on the remaining subjects. this srm is trained on the data from run one, and the data from the test\n subject's other run is than projected onto this template. Data is saved in numpy's npy.\n \"\"\"\n # iterate over runs used for training / testing\n for trainrun_idx in range(2):\n # select run used for training and test\n training_arrs = (run1_arrs, run2_arrs)[trainrun_idx]\n test_arrs = (run1_arrs, run2_arrs)[abs(trainrun_idx - 1)]\n # prepare results arrays (for projected data and estimated srm time series)\n results_array = np.zeros(shape=(len(training_arrs), k, training_arrs[0].shape[1]))\n trained_srms_array = np.zeros(shape=(len(run1_arrs), k, training_arrs[0].shape[1]))\n # iterate over testsubjects\n for testsub_idx in range(len(training_arrs)):\n start = time.time()\n print('starting run %i subject %i' % (trainrun_idx, testsub_idx))\n trainsubs_traindata = [x for i, x in enumerate(training_arrs) if i != testsub_idx] # select training data\n testsub_traindata = training_arrs[testsub_idx]\n srm = RSRM(n_iter=niter, features=k) # train srm on training subject's training data\n srm.fit(trainsubs_traindata)\n w, s = srm.transform_subject(testsub_traindata) # 
estimate test subject's bases\n # reattach weight matrix and individual term to srm instance\n # (to allow transforming test run with builtin brainiak function)\n srm.w_.insert(testsub_idx, w)\n srm.s_.insert(testsub_idx, s)\n projected_data, ind_terms = srm.transform(test_arrs) # project test run into shared space\n testsub_proj = projected_data[testsub_idx] # select projected data from test subject\n results_array[testsub_idx] = testsub_proj # append result to our results arrays\n trained_srms_array[testsub_idx] = srm.r_\n elapsed = time.time() - start\n print('this round took: ', elapsed)\n # save results array for this run\n proj_outpath = pjoin(outdir, 'proj_run%i_is_train.npy' % (trainrun_idx + 1))\n with open(proj_outpath, 'wb') as outf:\n np.save(outf, results_array)\n trained_srms_outpath = pjoin(outdir, 'trainedsrms_run%i_is_train.npy' % (trainrun_idx + 1))\n with open(trained_srms_outpath, 'wb') as outf:\n np.save(outf, trained_srms_array)\n print('done!')\n return None\n\n\ndef project_data_random_to_periodic(ds_dir='/data/project/somato/scratch/dataset',\n roiglmdir='/data/project/somato/scratch/roi_glm/work_basedir/',\n outdir='/data/project/somato/scratch/project_random_stimulation',\n nfeatures=10,\n niter=20,\n exclude_subjects=('fip66'),\n testsubs=False):\n \"\"\"\n Fit SRM to the periodic data (which has been z-scored and appended) with one subject left out\n and project the test-subject's random runs (blocked_design 1 and 2) to that model.\n Save these results as .npy files.\n \"\"\"\n # load data\n print('loading data')\n periodic_data, random1_data, random2_data, \\\n periodic_labels_concat, random_labels = load_data_and_labels(testsubs_=testsubs,\n dsdir=ds_dir,\n roiglm_workdir=roiglmdir,\n excludesubs=exclude_subjects)\n # create output directory\n if not os.path.exists(outdir):\n print('creating directory : ', outdir)\n os.makedirs(outdir)\n # init results array (nsubs, nruns, nfeatures, nvols_random)\n proj_results = 
np.zeros(shape=(len(periodic_data), 2, nfeatures, random1_data[0].shape[1]))\n # and one for training models with average responses (nsubs, nfeatures, nvols_random)\n train_results = np.zeros(shape=(len(periodic_data), nfeatures, 512)) # TODO don't hardcode last shape value\n # init srm\n srm = RSRM(n_iter=niter, features=nfeatures)\n # iterate over test subjects\n for testsub_idx in range(len(periodic_data)):\n print('starting subject with index : ', testsub_idx)\n # select training subjects periodic data\n training_data = [entry for idx, entry in enumerate(periodic_data) if idx != testsub_idx]\n # fit srm and estimate mapping/basis for test subject\n srm.fit(training_data)\n w, s = srm.transform_subject(periodic_data[testsub_idx])\n srm.w_.insert(testsub_idx, w) # insert basis back to our srm object\n srm.s_.insert(testsub_idx, s)\n # project data from random runs\n proj_random1, ind_terms1 = srm.transform(random1_data)\n proj_random2, ind_terms2 = srm.transform(random2_data)\n # append to results\n proj_results[testsub_idx, 0] = proj_random1[testsub_idx] # first random run\n proj_results[testsub_idx, 1] = proj_random2[testsub_idx] # second random run\n train_results[testsub_idx] = srm.r_\n # save to npy\n print('saving results')\n proj_fname = pjoin(outdir, 'proj_results.npy')\n train_fname = pjoin(outdir, 'train_results.npy')\n for fname, result in zip([proj_fname, train_fname],\n [proj_results, train_results]):\n with open(fname, 'wb') as fhandle:\n np.save(fhandle, result)\n return None\n\n\nif __name__ == '__main__':\n\n # may this script run for the periodic design only or the random data projected on model trained on periodic data.\n whichdesign = 'random'\n assert whichdesign in (['random', 'periodic'])\n\n if whichdesign == 'random':\n project_data_random_to_periodic()\n\n elif whichdesign == 'periodic':\n # todo: make this shorter (not so important now)\n run1_data, run2_data, run3_data, run4_data, run1_masks, run2_masks, run3_masks, run4_masks = 
datagrabber()\n print('loading run 1 data')\n run1_arrs = load_data(run1_data, run2_data, run1_masks, run2_masks, whichrun=1,\n force_mask_run1=True, zscore=True, nan2num=True)\n print('loading run 2 data')\n run2_arrs = load_data(run1_data, run2_data, run1_masks, run2_masks, whichrun=2,\n force_mask_run1=True, zscore=True, nan2num=True)\n project_data_crossval_periodiconly(run1_arrs, run2_arrs)\n"
},
{
"alpha_fraction": 0.6135244965553284,
"alphanum_fraction": 0.6201425194740295,
"avg_line_length": 47.10612106323242,
"blob_id": "2a644a68aaa71b7cbd3f019c2235cf24ab7a9428",
"content_id": "bd3b8323eaa4d20c5f0d0ac202ce817e84134dcf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11786,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 245,
"path": "/roi_glm/roi_glm_ffx.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n\"\"\"\n# TODO: description\n\"\"\"\n\n\ndef grab_subject_ids(ds_dir='/data/project/somato/scratch/dataset',\n testsubs=False):\n \"\"\"\n Get list of all subject IDs.\n \"\"\"\n import os\n import glob\n sub_ids = [os.path.basename(subdir) for subdir in glob.glob(ds_dir + '/*')]\n if testsubs:\n sub_ids = sub_ids[:testsubs]\n return sub_ids\n\n\ndef create_subject_ffx_wf(sub_id, bet_fracthr, spatial_fwhm, susan_brightthresh, hp_vols, lp_vols, remove_hemi,\n film_thresh, film_model_autocorr, use_derivs, tr, tcon_subtractive, cluster_threshold,\n cluster_thresh_frac, cluster_p, dilate_clusters_voxel, cond_ids, dsdir, work_basedir):\n # todo: new mapnode inputs: cluster_threshold, cluster_p\n \"\"\"\n Make a workflow including preprocessing, first level, and second level GLM analysis for a given subject.\n This pipeline includes:\n - skull stripping\n - spatial smoothing\n - removing the irrelevant hemisphere\n - temporal band pass filter\n - 1st level GLM\n - averaging f-contrasts from 1st level GLM\n - clustering run-wise f-tests, dilating clusters, and returning binary roi mask\n \"\"\"\n\n from nipype.algorithms.modelgen import SpecifyModel\n from nipype.interfaces.fsl import BET, SUSAN, ImageMaths\n from nipype.interfaces.fsl.model import SmoothEstimate, Cluster\n from nipype.interfaces.fsl.maths import TemporalFilter, MathsCommand\n from nipype.interfaces.utility import Function\n from nipype.pipeline.engine import Workflow, Node, MapNode\n from nipype.workflows.fmri.fsl import create_modelfit_workflow\n from nipype.interfaces.fsl.maths import MultiImageMaths\n from nipype.interfaces.utility import IdentityInterface\n import sys\n from os.path import join as pjoin\n import os\n sys.path.insert(0, \"/data/project/somato/raw/code/roi_glm/custom_node_functions.py\")\n # TODO: don't hardcode this\n import custom_node_functions\n\n # set up sub-workflow\n sub_wf = Workflow(name='subject_%s_wf' % sub_id)\n # set up 
sub-working-directory\n subwf_wd = pjoin(work_basedir, 'subject_ffx_wfs', 'subject_%s_ffx_workdir' % sub_id)\n if not os.path.exists(subwf_wd):\n os.makedirs(subwf_wd)\n sub_wf.base_dir = subwf_wd\n\n # Grab bold files for all four runs of one subject.\n # in the order [d1_d5, d5_d1, blocked_design1, blocked_design2]\n grab_boldfiles = Node(Function(function=custom_node_functions.grab_boldfiles_subject,\n input_names=['sub_id', 'cond_ids', 'ds_dir'], output_names=['boldfiles']),\n name='grab_boldfiles')\n grab_boldfiles.inputs.sub_id = sub_id\n grab_boldfiles.inputs.cond_ids = cond_ids\n grab_boldfiles.inputs.ds_dir = dsdir\n\n getonsets = Node(Function(function=custom_node_functions.grab_blocked_design_onsets_subject,\n input_names=['sub_id', 'prepped_ds_dir'],\n output_names=['blocked_design_onsets_dicts']),\n name='getonsets')\n getonsets.inputs.sub_id = sub_id\n getonsets.inputs.prepped_ds_dir = dsdir\n\n # pass bold files through preprocessing pipeline\n bet = MapNode(BET(frac=bet_fracthr, functional=True, mask=True),\n iterfield=['in_file'], name='bet')\n\n pick_mask = Node(Function(function=custom_node_functions.pick_first_mask,\n input_names=['mask_files'], output_names=['first_mask']), name='pick_mask')\n\n # SUSAN smoothing node\n susan = MapNode(SUSAN(fwhm=spatial_fwhm, brightness_threshold=susan_brightthresh),\n iterfield=['in_file'], name='susan')\n\n # bandpass filter node\n bpf = MapNode(TemporalFilter(highpass_sigma=hp_vols / 2.3548,\n lowpass_sigma=lp_vols / 2.3548),\n iterfield=['in_file'], name='bpf')\n\n # cut away hemisphere node\n if remove_hemi == 'r':\n roi_args = '-roi 96 -1 0 -1 0 -1 0 -1'\n elif remove_hemi == 'l':\n roi_args = '-roi 0 96 0 -1 0 -1 0 -1'\n else:\n raise IOError('did not recognite value of remove_hemi %s' % remove_hemi)\n\n cut_hemi_func = MapNode(MathsCommand(), iterfield=['in_file'], name='cut_hemi_func')\n cut_hemi_func.inputs.args = roi_args\n\n cut_hemi_mask = MapNode(MathsCommand(), iterfield=['in_file'], 
name='cut_hemi_mask')\n cut_hemi_mask.inputs.args = roi_args\n\n # Make Design and Contrasts for that subject\n # subject_info ist a list of two \"Bunches\", each for one run, containing conditions, onsets, durations\n designgen = Node(Function(input_names=['subtractive_contrast', 'blocked_design_onsets_dicts'],\n output_names=['subject_info', 'contrasts'],\n function=custom_node_functions.make_bunch_and_contrasts),\n name='designgen')\n designgen.inputs.subtractive_contrasts = tcon_subtractive\n\n # create 'session_info' for modelfit\n modelspec = MapNode(SpecifyModel(input_units='secs'), name='modelspec',\n iterfield=['functional_runs', 'subject_info'])\n modelspec.inputs.high_pass_filter_cutoff = hp_vols * tr\n modelspec.inputs.time_repetition = tr\n\n flatten_session_infos = Node(Function(input_names=['nested_list'], output_names=['flat_list'],\n function=custom_node_functions.flatten_nested_list),\n name='flatten_session_infos')\n\n # Fist-level workflow\n modelfit = create_modelfit_workflow(f_contrasts=True)\n modelfit.inputs.inputspec.interscan_interval = tr\n modelfit.inputs.inputspec.film_threshold = film_thresh\n modelfit.inputs.inputspec.model_serial_correlations = film_model_autocorr\n modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivs}}\n\n # node that reshapes list of copes returned from modelfit\n cope_sorter = Node(Function(input_names=['copes', 'varcopes', 'contrasts'],\n output_names=['copes', 'varcopes', 'n_runs'],\n function=custom_node_functions.sort_copes),\n name='cope_sorter')\n\n # average zfstats from both runs\n split_zfstats = Node(Function(function=custom_node_functions.split_zfstats_runs,\n input_names=['zfstats_list'],\n output_names=['zfstat_run1', 'zfstat_run2']),\n name='split_zfstats')\n average_zfstats = Node(MultiImageMaths(op_string='-add %s -div 2'), name='mean_images')\n\n # estimate smoothness of 1st lvl zf-files\n smoothest = MapNode(SmoothEstimate(), name='smoothest', iterfield=['mask_file', 
'zstat_file'])\n\n cluster = MapNode(Cluster(), name='cluster',\n iterfield=['in_file', 'volume', 'dlh'])\n cluster.inputs.threshold = cluster_threshold\n cluster.inputs.pthreshold = cluster_p\n cluster.inputs.fractional = cluster_thresh_frac\n cluster.inputs.no_table = True\n cluster.inputs.out_threshold_file = True\n cluster.inputs.out_pval_file = True\n cluster.inputs.out_localmax_vol_file = True\n cluster.inputs.out_max_file = True\n cluster.inputs.out_size_file = True\n\n # dilate clusters\n dilate = MapNode(MathsCommand(args='-kernel sphere %i -dilD' % dilate_clusters_voxel),\n iterfield=['in_file'], name='dilate')\n\n # binarize the result to a mask\n binarize_roi = MapNode(ImageMaths(op_string='-nan -thr 0.001 -bin'),\n iterfield=['in_file'], name='binarize_roi')\n\n # connect preprocessing\n sub_wf.connect(grab_boldfiles, 'boldfiles', bet, 'in_file')\n sub_wf.connect(bet, 'out_file', susan, 'in_file')\n sub_wf.connect(susan, 'smoothed_file', bpf, 'in_file')\n sub_wf.connect(bpf, 'out_file', cut_hemi_func, 'in_file')\n sub_wf.connect(bet, 'mask_file', cut_hemi_mask, 'in_file')\n # connect to 1st level model\n sub_wf.connect(cut_hemi_func, 'out_file', modelspec, 'functional_runs')\n sub_wf.connect(getonsets, 'blocked_design_onsets_dicts', designgen, 'blocked_design_onsets_dicts')\n sub_wf.connect(designgen, 'subject_info', modelspec, 'subject_info')\n sub_wf.connect(modelspec, 'session_info', flatten_session_infos, 'nested_list')\n sub_wf.connect(flatten_session_infos, 'flat_list', modelfit, 'inputspec.session_info')\n sub_wf.connect(designgen, 'contrasts', modelfit, 'inputspec.contrasts')\n sub_wf.connect(cut_hemi_func, 'out_file', modelfit, 'inputspec.functional_data')\n # connect to cluster thresholding\n sub_wf.connect(cut_hemi_mask, 'out_file', smoothest, 'mask_file')\n sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', smoothest, 'zstat_file')\n sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', cluster, 'in_file')\n 
sub_wf.connect(smoothest, 'dlh', cluster, 'dlh')\n sub_wf.connect(smoothest, 'volume', cluster, 'volume')\n sub_wf.connect(cluster, 'threshold_file', dilate, 'in_file')\n sub_wf.connect(dilate, 'out_file', binarize_roi, 'in_file')\n # connect to averaging f-contrasts\n sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', split_zfstats, 'zfstats_list')\n sub_wf.connect(split_zfstats, 'zfstat_run1', average_zfstats, 'in_file')\n sub_wf.connect(split_zfstats, 'zfstat_run2', average_zfstats, 'operand_files')\n # redirect to outputspec\n # TODO: redirekt outputspec to datasink in meta-wf\n outputspec = Node(IdentityInterface(\n fields=['threshold_file', 'index_file', 'pval_file', 'localmax_txt_file']), name='outputspec')\n sub_wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')\n sub_wf.connect(cluster, 'index_file', outputspec, 'index_file')\n sub_wf.connect(cluster, 'pval_file', outputspec, 'pval_file')\n sub_wf.connect(cluster, 'localmax_txt_file', outputspec, 'localmax_txt_file')\n sub_wf.connect(binarize_roi, 'out_file', outputspec, 'roi')\n\n # run subject-lvl workflow\n # sub_wf.write_graph(graph2use='colored', dotfilename='./subwf_graph.dot')\n # sub_wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})\n # sub_wf.run(plugin='CondorDAGMan')\n # sub_wf.run()\n\n return sub_wf\n\n\nif __name__ == '__main__':\n import sys\n\n # catch subject id passed in through runscript\n subidx = int(sys.argv[1])\n subids = grab_subject_ids(ds_dir='/data/project/somato/scratch/dataset', testsubs=False)\n subid = subids[subidx]\n\n # generate workflow\n subwf = create_subject_ffx_wf(sub_id=subid,\n bet_fracthr=.2,\n spatial_fwhm=2,\n susan_brightthresh=1000,\n hp_vols=30.,\n lp_vols=2.,\n remove_hemi='r',\n film_thresh=.001,\n film_model_autocorr=True,\n use_derivs=False,\n tr=2.,\n tcon_subtractive=False,\n cluster_threshold=3.,\n cluster_thresh_frac=True,\n cluster_p=.001,\n dilate_clusters_voxel=2,\n cond_ids=('D1_D5', 'D5_D1', 'blocked_design1', 
'blocked_design2'),\n dsdir='/data/project/somato/scratch/dataset',\n work_basedir='/data/project/somato/scratch/roi_glm/work_basedir')\n\n # run\n subwf.write_graph(graph2use='colored', dotfilename='./groupwf_graph.dot')\n # subwf.run()\n subwf.run(plugin='MultiProc', plugin_args={'n_procs': 4})\n"
},
{
"alpha_fraction": 0.5755528211593628,
"alphanum_fraction": 0.591907262802124,
"avg_line_length": 45.84892272949219,
"blob_id": "4ecfb413bb506b998e0b3d9468f3ee4ac427f4b1",
"content_id": "b670b5af7a9e0184d44a427f5b40963b9d409761",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13024,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 278,
"path": "/srm/decode_random_stimulation.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport csv\nimport runpy\nfrom os.path import join as pjoin\n\nimport numpy as np\nfrom brainiak.funcalign.rsrm import RSRM\nfrom nilearn.image import load_img\nfrom nilearn.masking import apply_mask\nfrom nilearn.masking import intersect_masks\nfrom scipy.stats import zscore\nfrom sklearn.neighbors import KNeighborsClassifier\n\n# general io functions\nfile_globals = runpy.run_path('srm_roi.py')\nfile_globals2 = runpy.run_path('digit_classification_knn.py')\ndatagrabber = file_globals['datagrabber']\n# load_data = file_globals['load_data']\ngrab_subject_ids = file_globals['grab_subject_ids']\n# functions to get digit indices in periodic runs\nget_digit_indices, digit_indices_to_labels = file_globals2['get_digit_indices'], \\\n file_globals2['digit_indices_to_labels']\n\n\ndef load_and_append_periodic_runs_single_subject(run1_boldfile, run2_boldfile,\n run1_maskfile, run2_maskfile,\n zscorewithinrun=True,\n connected_clusters=False):\n \"\"\"\n For a given subject, load the data from the first two runs (periodic stimulation),\n mask with a union of the run masks\n and z score if desired.\n \"\"\"\n # get union mask\n print('intersecting masks :', '\\t', run1_maskfile, '\\t', 'and', '\\t',run2_maskfile)\n unionmask = intersect_masks([run1_maskfile, run2_maskfile], threshold=0, connected=connected_clusters)\n # load data, apply mask\n run1_arr = apply_mask(load_img(run1_boldfile), mask_img=unionmask).T\n run2_arr = apply_mask(load_img(run2_boldfile), mask_img=unionmask).T\n # zscore within run if desired\n if zscorewithinrun:\n for runimg in [run1_arr, run2_arr]:\n runimg = zscore(np.nan_to_num(runimg), axis=1)\n # concatenate runs\n sub_arr = np.concatenate((run1_arr, run2_arr), axis=1)\n print('finished loading masks: ', run1_maskfile, ' and ', run2_maskfile)\n return sub_arr, unionmask\n\n\ndef load_and_append_periodic_data(run1_data, run2_data,\n run1_masks, run2_masks,\n zscore_withinrun=True):\n \"\"\"\n returns periodic_data, 
which is list of arrays where each element corresponds to the concatenated periodic runs\n for a given subject.\n And union_masks, which is list of arrays.\n \"\"\"\n periodic_data = []\n union_masks = []\n for sub_idx in range(len(run1_data)):\n sub_arr, unionmask = load_and_append_periodic_runs_single_subject(run1_data[sub_idx], run2_data[sub_idx],\n run1_masks[sub_idx], run2_masks[sub_idx],\n zscorewithinrun=zscore_withinrun)\n periodic_data.append(sub_arr)\n union_masks.append(unionmask)\n print('finished loading subject with index : ', sub_idx)\n return periodic_data, union_masks\n\n\ndef load_masked_random_runs(run3_data,\n run4_data,\n union_masks):\n # TODO: make z-scoring optional\n random1_data = [\n zscore(np.nan_to_num(apply_mask(load_img(run3_file), mask_img=unionmask).T), axis=1, ddof=1)\n for run3_file, unionmask\n in zip(run3_data, union_masks)\n ]\n random2_data = [\n zscore(np.nan_to_num(apply_mask(load_img(run4_file), mask_img=unionmask).T), axis=1, ddof=1)\n for run4_file, unionmask\n in zip(run4_data, union_masks)]\n return random1_data, random2_data\n\n\ndef fit_srm_and_project_data(periodic_data,\n random1_data, random2_data,\n n_responses=5,\n n_iter=20):\n srm = RSRM(n_iter=n_iter, features=n_responses)\n srm.fit(periodic_data)\n random1_projected, random1_indterms = srm.transform(random1_data)\n random2_projected, random2_indterms = srm.transform(random2_data)\n periodic_data_projected, periodic_indterms = srm.transform(periodic_data)\n # TODO: don't return indterms\n return srm, random1_projected, random2_projected, periodic_data_projected\n\n\ndef get_onsets_randomruns(sub_ids,\n prepped_ds_dir):\n # array with onsets of shape nsubs, nruns, ndigits\n onsets_array = np.zeros(shape=(len(sub_ids), 2, 5, 10))\n for sub_idx, sub_id in enumerate(sub_ids):\n for run_idx, run_str in enumerate(['blocked_design1', 'blocked_design2']):\n for dig_idx, dig_int in enumerate(range(1, 6)):\n dig_abspath = pjoin(prepped_ds_dir, sub_id, run_str, 'D%i.ons' 
% dig_int)\n with open(dig_abspath, 'r') as f:\n csv_reader = csv.reader(f, delimiter='\\n')\n dig_onsets = [float(row[0]) for row in csv_reader]\n onsets_array[sub_idx, run_idx, dig_idx] = dig_onsets\n return onsets_array\n\n\ndef randomruns_onsets_to_labels(randomruns_onsets_array,\n stimdur=5.12,\n tr=2.,\n nvols_in_random_run=212):\n stimdur_ms = int(stimdur * 100)\n nsubs, nruns, ndigits, nonsets_per_digit = randomruns_onsets_array.shape\n labels_ms = np.zeros(shape=(nsubs, nruns, int(nvols_in_random_run * tr * 100)))\n for sub_idx in range(nsubs):\n for run_idx in range(nruns):\n for digit_idx, digit_id in enumerate(range(1, ndigits + 1)):\n dig_onsets = randomruns_onsets_array[sub_idx, run_idx, digit_idx]\n for ons in dig_onsets:\n ons_ms = int(ons * 100)\n labels_ms[sub_idx, run_idx, int(ons_ms):int(ons_ms + stimdur_ms)] = digit_id\n random_labels = labels_ms[:, :, ::int(100 * tr)]\n return random_labels\n\n\ndef make_periodic_labels(stimdur=5.12,\n tr=2.,\n nvols_periodic_run=256):\n # TODO: replace old digit_indices functions with this one\n stimdur_ms = int(stimdur * 100)\n nruns, ndigits, nonsets_per_digit = 2, 5, 20\n all_onsets_s = np.arange(0, nvols_periodic_run * tr, stimdur)\n all_onsets_ms = all_onsets_s * 100\n # initiate upscaled array\n periodic_labels_ms_2d = np.zeros(shape=(nruns, nvols_periodic_run * 100 * int(tr)))\n periodic_labels_2d = np.zeros(shape=(nruns, nvols_periodic_run))\n # iterate for first run, create second by flipping\n for dig_idx, dig_id in enumerate(range(1, 6)):\n dig_onsets_ms = all_onsets_ms[dig_idx::5]\n for ons_ms in dig_onsets_ms:\n periodic_labels_ms_2d[0, int(ons_ms):int(ons_ms + stimdur_ms)] = dig_id\n # downscale\n periodic_labels_2d[0] = periodic_labels_ms_2d[0, ::int(tr * 100)]\n # labels for second run by flipping those of first run\n periodic_labels_2d[1] = np.flip(periodic_labels_2d[0])\n # also concatenate for convenience\n periodic_labels_concat = periodic_labels_2d.flatten()\n return periodic_labels_2d, 
periodic_labels_concat\n\n\ndef load_data_and_labels(testsubs_=False,\n dsdir='/data/project/somato/scratch/dataset',\n roiglm_workdir='/data/project/somato/scratch/roi_glm/work_basedir/',\n excludesubs=()):\n \"\"\"\n Grab data and labels, fit srm and project data.\n Write as function as prestep for different potential classifiers.\n \"\"\"\n sub_ids = grab_subject_ids(ds_dir=dsdir, testsubs=testsubs_, exclude_subs=excludesubs)\n\n print('get labels')\n # get digit labels for periodic data and randomized data\n periodic_labels_2d, periodic_labels_concat = make_periodic_labels()\n random_onsets = get_onsets_randomruns(sub_ids, prepped_ds_dir=dsdir)\n random_labels = randomruns_onsets_to_labels(random_onsets)\n\n # get relevant file_paths\n print('datagrabber')\n run1_data, run2_data, \\\n run3_data, run4_data, \\\n run1_masks, run2_masks, \\\n run3_masks, run4_masks = datagrabber(roi_glm_workdir=roiglm_workdir,\n prepped_dsdir=dsdir,\n testsubs=testsubs_, excludesubs=excludesubs)\n print('load_and_append_periodic')\n # load concatenated periodic data\n periodic_data, union_masks = load_and_append_periodic_data(run1_data, run2_data,\n run1_masks, run2_masks,\n zscore_withinrun=True)\n print('load and mask random runs')\n # load data from runs with randomized digit stimulation\n random1_data, random2_data = load_masked_random_runs(run3_data, run4_data, union_masks)\n print('fit srm and project')\n return periodic_data, random1_data, random2_data, periodic_labels_concat, random_labels\n\n\ndef fit_and_project(periodic_data,\n random1_data, random2_data,\n n_iters_srm=20,\n nfeatures=5):\n # fit srm and project randomized data to shared space\n srm, random1_projected, random2_projected, \\\n periodic_data_projected_list = fit_srm_and_project_data(periodic_data,\n random1_data,\n random2_data,\n n_responses=nfeatures,\n n_iter=n_iters_srm)\n\n return srm, random1_projected, random2_projected, periodic_data_projected_list\n\n\ndef 
knn_randomdata_given_neighs(periodic_data_projected_list,\n random1_projected, random2_projected,\n periodic_labels_concat, random_labels,\n nneighs=5):\n \"\"\"\n Take data prepared with load_data_and_labels and run a knn with given nneighs.\n returns accuracy_array with shape (nsubs, nruns)\n \"\"\"\n # initiate accuracy array of shape: nsubs, nruns\n acc_array = np.zeros(shape=(len(random1_projected), 2))\n for testsub_idx in range(len(random1_projected)):\n for testrun_idx, testrun_data in enumerate([random1_projected, random2_projected]):\n training_data_list = [subdata for idx, subdata in enumerate(periodic_data_projected_list)\n if idx != testsub_idx]\n training_data_arr = np.concatenate(training_data_list, axis=1)\n training_labels = np.tile(periodic_labels_concat, len(training_data_list))\n neigh = KNeighborsClassifier(n_neighbors=nneighs)\n neigh.fit(training_data_arr.T, training_labels)\n testdata = np.nan_to_num(testrun_data[testsub_idx])\n test_labels = random_labels[testsub_idx, testrun_idx]\n acc_array[testsub_idx, testrun_idx] = neigh.score(testdata.T, test_labels)\n\n return acc_array\n\n\ndef iterate_knn_over_nneighs_nfeaturs(nfeatures_range=(5, 8, 10, 15, 20, 50, 100),\n nneighs_range=(5, 8, 10, 15, 20, 50, 100),\n outdir='/data/project/somato/scratch/decode_random_stimulation',\n exclude_subs=()):\n # TODO: this is the top level function. 
also specify all kwargs for lower level functions here.\n # TODO: docstring\n\n # load data\n print('loading data')\n periodic_data, \\\n random1_data, random2_data, \\\n periodic_labels_concat, random_labels = load_data_and_labels(excludesubs=exclude_subs)\n\n # prepare empty results array of shape (nfeatures_range, nneighs_range, nsubs, nruns)\n results = np.zeros(shape=(len(nfeatures_range), len(nneighs_range), len(random1_data), 2))\n\n for feat_idx, nfeatures_ in enumerate(nfeatures_range):\n # fit srm\n print('fitting srm with ', str(nfeatures_))\n srm, random1_projected, random2_projected, \\\n periodic_data_projected_list = fit_and_project(periodic_data,\n random1_data, random2_data, nfeatures=nfeatures_)\n\n for neighs_idx, nneighs_ in enumerate(nneighs_range):\n print('starting classification with ', str(nneighs_))\n # do classification\n acc_arr_ = knn_randomdata_given_neighs(periodic_data_projected_list,\n random1_projected, random2_projected,\n periodic_labels_concat, random_labels,\n nneighs=nneighs_)\n # append results\n results[feat_idx, neighs_idx] = acc_arr_\n\n # save result\n with open(pjoin(outdir, 'decode_random_stimulation.npy'), 'wb') as f:\n np.save(f, results)\n\n return None\n\n\n# TODO: same vor SVC classifier\n\n\nif __name__ == '__main__':\n iterate_knn_over_nneighs_nfeaturs(exclude_subs=('fip66')) # TODO check out what's wrong with fip66 data\n"
},
{
"alpha_fraction": 0.7424242496490479,
"alphanum_fraction": 0.75,
"avg_line_length": 21,
"blob_id": "73a265ea9a78103ee0aaa21150a1e46cc753d95e",
"content_id": "551af6a1cd99624649715c7f83a1ee8c7c3b693e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 132,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 6,
"path": "/roi_glm/run_roi_glm.sh",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\nsource /data/project/somato/raw/venvs/somato_env/bin/activate\nsource /etc/fsl/fsl.sh\n\npython roi_glm_ffx.py $1\n"
},
{
"alpha_fraction": 0.5881718397140503,
"alphanum_fraction": 0.5962610244750977,
"avg_line_length": 39.021583557128906,
"blob_id": "065e1d2f912c301bead40683e7426be15a6a5c2e",
"content_id": "99732a703417c5a57baf4127dac72e9a5136d251",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5563,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 139,
"path": "/ica/melodic/melodic_nipype.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n#SBATCH --job-name=subject_melodic\n#SBATCH --output=logs/multiprocess_%j.out\n#SBATCH --time=08:00:00\n#SBATCH --nodes=2\n#SBATCH --exclusive\n#SBATCH --ntasks=4\n\nfrom os.path import join as pjoin\n\nfrom nipype.interfaces.fsl import MELODIC, BET, SUSAN\nfrom nipype.interfaces.fsl.maths import TemporalFilter\nfrom nipype.interfaces.utility import Function\nfrom nipype.pipeline.engine import Workflow, Node, MapNode\n\n\ndef grab_somato_data(ds_dir='/home/BnB_User/oliver/somato/scratch/dataset',\n condition_names=('D1_D5', 'D5_D1'),\n testing=True):\n \"\"\"\n Grab functional data and attached metainfo (subject and run).\n if testing is True, only first two subjects' data will be grabbed.\n \"\"\"\n import glob\n import os\n from os.path import join as pjoin\n # subject directories (for development, only pick first two subjects)\n subject_dirs = [fpath for fpath in glob.glob(pjoin(ds_dir, '*'))]\n if testing:\n subject_dirs = subject_dirs[:2]\n subject_ids = [os.path.basename(abf) for abf in subject_dirs]\n # if analvl is group or run, return a simple list of all bold files\n bold_files = [pjoin(subj_dir, cond, 'data.nii.gz')\n for subj_dir in subject_dirs\n for cond in condition_names]\n return bold_files, subject_ids, condition_names\n\n\ndef reshape_flist(boldlist_flat, masklist_flat, nconds=2):\n \"\"\"\n turn list of file names into nested list where each sublist contains all conditions for given subjects.\n Also pick only one mask file (from bet) per condition.\n \"\"\"\n import numpy as np\n assert len(boldlist_flat) % nconds == 0\n nsubs = int(len(boldlist_flat) / nconds)\n # turn list of bold files into nested list where each sublist has the files for both conditions of a subject.\n flatarr = np.array(boldlist_flat)\n nestarr = flatarr.reshape(nsubs, nconds)\n boldlist_nested = nestarr.tolist()\n # pick one mask file per subject\n masklist_picked = masklist_flat[::nconds]\n return boldlist_nested, 
masklist_picked\n\n\ndef create_melodic_wf(wf_basedir='/home/homeGlobal/oli/somato/scratch/ica/MELODIC/melodic_wf_workdir',\n ana_lvl='subject',\n tr=2.,\n test_subs=False,\n out_report=True,\n bet_fracthr=.2,\n susan_fwhm=2.,\n susan_brightthresh=1000,\n hp_vols=25.,\n lp_vols=4.,\n melodic_bgthresh=10.):\n \"\"\"\n # TODO: docstring\n\n Sensible band pass filters suggested by Michael (in volumes):\n - stronger: 4 vols low-pass, 25 vol high-pass\n - weaker: 2 vol los-pass, 30 vol high-pass\n \"\"\"\n\n wf = Workflow(name='somato_melodic_wf')\n assert ana_lvl in ['run', 'subject']\n melodic, workdir = None, None\n\n # datagrabber node\n datagrabber = Node(Function(input_names=['testing'],\n output_names=['bold_files', 'subject_ids', 'condition_names'],\n function=grab_somato_data),\n name='datagrabber')\n datagrabber.inputs.testing = test_subs\n\n # BET node\n bet = MapNode(BET(frac=bet_fracthr, functional=True, mask=True),\n iterfield=['in_file'], name='bet')\n\n # SUSAN smoothing node\n susan = MapNode(SUSAN(fwhm=susan_fwhm, brightness_threshold=susan_brightthresh),\n iterfield=['in_file'], name='susan')\n\n bpf = MapNode(TemporalFilter(highpass_sigma=hp_vols / 2.3548,\n lowpass_sigma=lp_vols / 2.3548),\n iterfield=['in_file'], name='bpf')\n\n reshapeflist = Node(Function(input_names=['boldlist_flat', 'masklist_flat'],\n output_names=['boldlist_nested', 'masklist_picked'],\n function=reshape_flist),\n name='reshapeflist')\n\n # construct node or mapnode depending on subject, run, or group level ica\n if ana_lvl == 'subject':\n workdir = pjoin(wf_basedir, 'subject_lvl')\n melodic = MapNode(MELODIC(tr_sec=tr, out_all=True, report=out_report, no_bet=True,\n bg_threshold=melodic_bgthresh, approach='concat'),\n iterfield=['in_files', 'mask'],\n name='melodic')\n\n elif ana_lvl == 'run':\n workdir = pjoin(wf_basedir, 'run_lvl')\n melodic = MapNode(MELODIC(tr_sec=tr, out_all=True, report=out_report, no_bet=True,\n bg_threshold=melodic_bgthresh),\n iterfield=['in_files', 
'mask'],\n name='melodic')\n\n wf.connect(datagrabber, 'bold_files', bet, 'in_file')\n wf.connect(bet, 'out_file', susan, 'in_file')\n wf.connect(susan, 'smoothed_file', bpf, 'in_file')\n if ana_lvl == 'subject':\n wf.connect(bpf, 'out_file', reshapeflist, 'boldlist_flat')\n wf.connect(bet, 'mask_file', reshapeflist, 'masklist_flat')\n wf.connect(reshapeflist, 'boldlist_nested', melodic, 'in_files')\n wf.connect(reshapeflist, 'masklist_picked', melodic, 'mask')\n else:\n wf.connect(bpf, 'out_file', melodic, 'in_files')\n wf.connect(bet, 'mask_file', melodic, 'mask')\n\n wf.base_dir = workdir\n return wf\n\n\nif __name__ == '__main__':\n workflow = create_melodic_wf(ana_lvl='subject', test_subs=False)\n workflow.run()\n\n workflow.run(plugin='MultiProc', plugin_args={'n_procs': 4})\n"
},
{
"alpha_fraction": 0.5818526148796082,
"alphanum_fraction": 0.6006870269775391,
"avg_line_length": 40.382354736328125,
"blob_id": "d11a1d515daa8c9688c0b60aa710665fc1efaf4f",
"content_id": "55cbb48482a216df32e3bb0a70f80f6d4924ba6a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8442,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 204,
"path": "/srm/digit_classification_svc.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n\"\"\"\n1. load data from run 1\n2. train SRM on data from first run (leave one subject out)\n3. load data from run 2\n4. Project test subject's data from run 2 into shared space\n5.\n\"\"\"\n\nimport runpy\nimport time\nfrom os.path import join as pjoin\n\nimport numpy as np\nfrom brainiak.funcalign.rsrm import RSRM\nfrom scipy import signal, stats\nfrom sklearn.svm import SVC\n\n# import io functions from my other srm script\nfile_globals = runpy.run_path('srm_roi.py')\ndatagrabber = file_globals['datagrabber']\nload_data = file_globals['load_data']\ngrab_subject_ids = file_globals['grab_subject_ids']\n\n\ndef get_digit_indices(n_cycles=20,\n vols=256,\n vols_per_digit=2.56):\n \"\"\"\n Produce two boolean arrays, one for each run.\n Each array has shape (n_digits, n_volumes).\n Use these to select samples in our classification task\n \"\"\"\n # TODO: These indices are only correct if I understand Esther's stimulus timing correctly ...\n\n vols_per_digit_upsampled = int(vols_per_digit * 100)\n digits_run1 = []\n for didx in range(1, 6):\n # create series of 1s for the first finger stimulation\n finger_signal = signal.boxcar(vols_per_digit_upsampled)\n # add zeros before and after accordingly to form first cycle.\n post_padded = np.append(finger_signal, [0] * vols_per_digit_upsampled * (5 - didx))\n first_cycle = np.insert(post_padded, obj=0, values=[0] * vols_per_digit_upsampled * (didx - 1))\n all_cycles = np.tile(first_cycle, n_cycles) # repeat to get all cycles\n # resample to volume space (i.e. 
take every 100th element)\n # and turn into boolean vector\n digit_bool = all_cycles[::100] > 0.01\n digits_run1.append(digit_bool)\n digits_run1 = np.array(digits_run1)\n digits_run2 = np.flip(digits_run1, axis=0)\n return digits_run1, digits_run2\n\n\ndef digit_indices_to_labels(digits_run1, digits_run2):\n \"\"\"\n Turn the boolean arrays of digit indices\n into 1d arrays with values 1-6\n for use with SVC.\n \"\"\"\n labels_run1, labels_run2 = np.zeros(shape=(256)), np.zeros(shape=(256))\n for finger_i in range(1, 6):\n labels_run1[digits_run1[finger_i - 1]] = finger_i\n labels_run2[digits_run2[finger_i - 1]] = finger_i\n return labels_run1, labels_run2\n\n\ndef deprecated_compute_corrmat_digits(test_data,\n test_digits_arr,\n train_digits_arr,\n trained_srm,\n average_axis=1):\n \"\"\"\n Compute a correlation matrix between the digit segments in a test subject's data from the test run\n and the respective trained shared responses.\n\n data is z-scored and averaged beforehand.\n average_axis=1 indicates averaging over all selected samples of each component,\n which is what I think is intended in the original SRM time segment matching studies.\n \"\"\"\n corr_mtx = np.zeros(shape=(5, 5))\n for i in range(5):\n for j in range(5):\n data = test_data[:, test_digits_arr[i, :]]\n data = np.average(stats.zscore(data, axis=0, ddof=1), axis=average_axis)\n features = trained_srm.r_[:, train_digits_arr[j, :]]\n features = np.average(stats.zscore(features, axis=0, ddof=1), axis=average_axis)\n corr = np.corrcoef(data, features)[0][1]\n corr_mtx[i, j] = corr\n return corr_mtx\n\n\ndef deprecated_corrmat_accuracies(corr_mat):\n \"\"\"\n Take correlation matrix calculated with deprecated_compute_corrmat_digits and\n return list of accuracies with each element representing one digit.\n \"\"\"\n accuracies = []\n for i in range(5):\n row = corr_mat[i, :]\n if np.argmax(row) == i:\n accuracies.append(1)\n else:\n accuracies.append(0)\n return accuracies\n\n\ndef 
run_crossval_classification_given_k(run1_arrs,\n run2_arrs,\n labels_run1,\n labels_run2,\n k=3,\n niter=20,\n svc_kernel='rbf',\n svc_gamma='auto',\n outdir='/data/BnB_USER/oliver/somato/scratch/digit_classification'):\n \"\"\"\n # TODO: In order to transform data from run 2 to run 1, the ROI masks from both runs have to have the same\n number of voxels. Maybe we should take the union of both masks for this?\n \"\"\"\n\n # prepare empty results array\n # acc_results = np.zeros(shape=(2, len(run1_arrays), 5)) # shape (nruns, nsubs, ndigits)\n acc_results = np.zeros(shape=(2, len(run1_arrs)))\n\n for trainrun_idx in range(2): # iterate over runs\n # select run used for training and test and according digit indices\n training_arrs = (run1_arrs, run2_arrs)[trainrun_idx]\n test_arrs = (run1_arrs, run2_arrs)[abs(trainrun_idx - 1)]\n train_digits = (labels_run1, labels_run2)[trainrun_idx]\n test_digits = (labels_run1, labels_run2)[abs(trainrun_idx - 1)]\n\n # iterate over testsubjects\n for testsub_idx in range(len(training_arrs)):\n start = time.time()\n print('starting run %i subject %i' % (trainrun_idx, testsub_idx))\n\n trainsubs_traindata = [x for i, x in enumerate(training_arrs) if i != testsub_idx] # select training data\n testsub_traindata = training_arrs[testsub_idx]\n\n srm = RSRM(n_iter=niter, features=k) # train srm on training subject's training data\n srm.fit(trainsubs_traindata)\n w, s = srm.transform_subject(testsub_traindata) # estimate test subject's bases\n\n # reattach weight matrix and individual term to srm instance\n # (to allow transforming test run with builtin brainiak function)\n srm.w_.insert(testsub_idx, w)\n srm.s_.insert(testsub_idx, s)\n\n projected_data, ind_terms = srm.transform(test_arrs) # project test run into shared space\n testsub_proj = projected_data[testsub_idx] # select projected data from test subject\n\n # compute correlation matrix\n # corr_mtx = deprecated_compute_corrmat_digits(test_data=testsub_proj, trained_srm=srm,\n # 
test_digits_arr=test_digits, train_digits_arr=train_digits)\n # compute accuracies\n # accuracies = deprecated_corrmat_accuracies(corr_mtx)\n\n # set up support vector classifier\n clf = SVC(kernel=svc_kernel, gamma=svc_gamma)\n # train classifier\n clf.fit(srm.r_.T, train_digits)\n # score on test data\n score = clf.score(testsub_proj.T, test_digits)\n print('score: ', score)\n acc_results[trainrun_idx, testsub_idx] = score # append result to our results arrays\n elapsed = time.time() - start\n print('this round took: ', elapsed)\n\n # save results array for this run\n acc_outpath = pjoin(outdir, 'accuracies_k%i.npy' % k)\n with open(acc_outpath, 'wb') as outf:\n np.save(outf, acc_results)\n\n print('done!')\n return None\n\n\ndef test_different_ks(ks=(3, 5, 10, 20, 50, 100, 200),\n srm_iter=30):\n \"\"\"\n Run cross-validated classification to over different numbers of shared responses (nfeatures)\n and save the resulting accuracies.\n \"\"\"\n # load data\n run1_data, run2_data, run1_masks, run2_masks = datagrabber()\n print('loading run 1')\n run1_arrs = load_data(run1_data, run2_data, run1_masks, run2_masks, whichrun=1, force_mask_run1=True)\n print('loading run 2')\n run2_arrs = load_data(run1_data, run2_data, run1_masks, run2_masks, whichrun=2, force_mask_run1=True)\n digits_run1, digits_run2 = get_digit_indices()\n labels_run1, labels_run2 = digit_indices_to_labels(digits_run1, digits_run2)\n\n for k in ks:\n print('starting nfeatures :', k)\n run_crossval_classification_given_k(run1_arrs=run1_arrs, run2_arrs=run2_arrs, k=k,\n labels_run1=labels_run1, labels_run2=labels_run2,\n niter=srm_iter)\n\n return None\n\n\nif __name__ == '__main__':\n test_different_ks()\n"
},
{
"alpha_fraction": 0.5532435774803162,
"alphanum_fraction": 0.5667074918746948,
"avg_line_length": 36.70769119262695,
"blob_id": "ecf680b04c8f9e6f4fc563f244615bf43665f5e5",
"content_id": "9565d73e13b79bff12540c363e52604c35f3f8a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2451,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 65,
"path": "/ds_prep/change_header_bold.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n\nimport glob\nimport os\nfrom os.path import join as pjoin\n\nimport nibabel as nib\n\n\ndef get_bold_fnames(ds_dir='/data/project/somato/raw/data/DatenKuehn/',\n conds=('D1_D5', 'D5_D1', 'blocked_design1', 'blocked_design2')):\n \"\"\"\n Get file paths for our bold data for a given run.\n returns boldfiles_dict with conditions as keys and filenames per subject as values\n \"\"\"\n sub_ids = [os.path.basename(subdir)\n for subdir in glob.glob(ds_dir + '/*')\n if '.txt' not in subdir] # skip readme.txt\n boldfiles_dict = {}\n for cond in conds:\n boldfiles_dict[cond] = [pjoin(ds_dir, sub_id, cond, 'data.nii')\n for sub_id in sub_ids]\n return boldfiles_dict, sub_ids\n\n\ndef run_change_headers(boldfiles_dict,\n sub_ids,\n changes_dict,\n outdir='/data/project/somato/scratch/dataset'):\n \"\"\"\n load boldfiles_dict and set some header parameters specified in the changes_dict dict.\n Save these new images in a new dataset under outdir.\n\n original header entry for pixdim was:\n [-1., 1., 1., 1.0000001, 0., 0., 0., 0.]\n Hence, we should set the fourth entry to 2.0 for TR of 2 secs.\n \"\"\"\n for subid in sub_ids:\n for cond in boldfiles_dict.keys():\n # if the sub-outdir does already exist, skip\n outsubdir = pjoin(outdir, subid, cond)\n if not os.path.exists(outsubdir):\n print('starting subject ', subid, ' condition ', cond)\n # make specific outdir\n os.makedirs(outsubdir)\n # pick boldfile of given subj and cond\n bold_fname = boldfiles_dict[cond][sub_ids.index(subid)]\n bold_img = nib.load(bold_fname)\n # change header info\n for changes_key in changes_dict.keys():\n bold_img.header[changes_key] = changes_dict[changes_key]\n # save outfile\n outfile = pjoin(outsubdir, 'data.nii.gz')\n nib.save(bold_img, outfile)\n else:\n print('already exists ', subid, ' ', cond)\n return None\n\n\nif __name__ == '__main__':\n # only change pixdims\n changesdict = {'pixdim': [-1., 1., 1., 1., 2., 0., 0., 0.]}\n boldfilesdict, sub_ids = 
get_bold_fnames()\n run_change_headers(boldfilesdict, sub_ids, changesdict)\n"
},
{
"alpha_fraction": 0.6191314458847046,
"alphanum_fraction": 0.6349765062332153,
"avg_line_length": 41.25619888305664,
"blob_id": "6d8842a281abddbfe9f91c3601ad6b93a0052435",
"content_id": "6685e172cc9ac08ac2205aa3b535f21cc330078c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5112,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 121,
"path": "/roi_glm/custom_node_functions.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n\ndef grab_boldfiles_subject(sub_id, cond_ids, ds_dir):\n \"\"\"\n Get file paths for our bold data for a given subject.\n \"\"\"\n from os.path import join as pjoin\n boldfiles = [pjoin(ds_dir, sub_id, cond_id, 'data.nii.gz') for cond_id in cond_ids]\n return boldfiles\n\n\ndef grab_blocked_design_onsets_subject(sub_id,\n prepped_ds_dir):\n \"\"\"\n For the given subject, generate a dict with keys blocked_design1 and blocked_design2\n and nested lists as values representing the onsets of the five digits.\n\n note: durations is always 5.12, so no need to load that.\n \"\"\"\n import csv\n from os.path import join as pjoin\n blocked_design_onsets_dicts = {}\n cond_ids = ('blocked_design1', 'blocked_design2')\n for condid in cond_ids:\n blocked_design_onsets_dicts[condid] = []\n for dig_int in range(1, 6):\n dig_abspath = pjoin(prepped_ds_dir, sub_id, condid, 'D%i.ons' % dig_int)\n with open(dig_abspath, 'r') as f:\n csv_reader = csv.reader(f, delimiter='\\n')\n dig_onsets = [float(row[0]) for row in csv_reader]\n blocked_design_onsets_dicts[condid].append(dig_onsets)\n return blocked_design_onsets_dicts\n\n\ndef make_bunch_and_contrasts(blocked_design_onsets_dicts,\n n_cycles=20,\n dur_per_digit=5.12,\n subtractive_contrast=False):\n \"\"\"\n Produce subject_info as required input of SpecifyModel (Bunch containing conditions, onsets, durations)\n and contrasts as input for modelfit workflow.\n\n Subtractive contrasts weights regressors of interest with +4 and all others with -1. in this case, we skip the last\n contrast (because it would be a linear combination of the others).\n Non-subtractive contrast (i.e. 
one-sample t-test) weights regressor of interest with 1 and all others with 0.\n \"\"\"\n\n from nipype.interfaces.base import Bunch\n cycle_dur = dur_per_digit * 5\n\n # in periodic stimulation runs: onsets are the same for both conditions, just the order of regressors is flipped\n periodic_onsets = [[0 + (digit_idx * dur_per_digit) + (cycle_idx * cycle_dur)\n for cycle_idx in range(n_cycles)]\n for digit_idx in range(5)]\n durations = [[dur_per_digit] * n_cycles for _ in range(5)]\n d1_d5_conditions = ['D_%i' % i for i in range(1, 6)]\n d5_d1_conditions = ['D_%i' % i for i in range(5, 0, -1)]\n\n # blocked_design conditions and onsets\n blocked1_onsets = blocked_design_onsets_dicts['blocked_design1']\n blocked2_onsets = blocked_design_onsets_dicts['blocked_design2']\n\n subject_info = [Bunch(conditions=d1_d5_conditions, onsets=periodic_onsets, durations=durations),\n Bunch(conditions=d5_d1_conditions, onsets=periodic_onsets, durations=durations),\n Bunch(conditions=d1_d5_conditions, onsets=blocked1_onsets, durations=durations),\n Bunch(conditions=d1_d5_conditions, onsets=blocked2_onsets, durations=durations)]\n # t-cotrasts\n t_contrasts = []\n for cond_name in d1_d5_conditions:\n if subtractive_contrast:\n if d1_d5_conditions.index(cond_name) == len(cond_name) - 1:\n continue\n else:\n contrast_vector = [-1, -1, -1, -1]\n contrast_vector.insert(d1_d5_conditions.index(cond_name), 4)\n t_contrasts.append(('tcon_%s' % cond_name, 'T', d1_d5_conditions, contrast_vector))\n else:\n contrast_vector = [0, 0, 0, 0]\n contrast_vector.insert(d1_d5_conditions.index(cond_name), 1)\n t_contrasts.append(('tcon_%s' % cond_name, 'T', d1_d5_conditions, contrast_vector))\n # f-contrast over all t-contrasts\n f_contrast = [('All_Digits', 'F', t_contrasts)]\n contrasts = t_contrasts + f_contrast\n n_copes = len(contrasts)\n return subject_info, contrasts\n\n\ndef flatten_nested_list(nested_list):\n \"\"\"\n Seems like a bit of a design flaw that this is even necessary,\n but oh, 
well ...\n \"\"\"\n flat_list = [item for sublist in nested_list for item in sublist]\n return flat_list\n\n\ndef sort_copes(copes, varcopes, contrasts):\n import numpy as np\n if not isinstance(copes, list):\n copes = [copes]\n varcopes = [varcopes]\n num_copes = len(contrasts)\n n_runs = len(copes)\n all_copes = np.array(copes).flatten()\n all_varcopes = np.array(varcopes).flatten()\n # outcopes = all_copes.reshape(len(all_copes) / num_copes, num_copes).T.tolist()\n outcopes = all_copes.reshape(int(len(all_copes) / len(copes[0])), len(copes[0])).T.tolist()\n outvarcopes = all_varcopes.reshape(int(len(all_varcopes) / len(varcopes[0])), len(varcopes[0])).T.tolist()\n return outcopes, outvarcopes, n_runs\n\n\ndef split_zfstats_runs(zfstats_list):\n zfstat_run1 = zfstats_list[0]\n zfstat_run2 = [zfstats_list[1]] # operand files have to be a list\n return zfstat_run1, zfstat_run2\n\n\ndef pick_first_mask(mask_files):\n first_mask = mask_files[0]\n return first_mask"
},
{
"alpha_fraction": 0.7579908967018127,
"alphanum_fraction": 0.7808219194412231,
"avg_line_length": 86.5999984741211,
"blob_id": "e8fbc8fce3080af527a22108387440696a211607",
"content_id": "6a3f3877e77039c69cb412884f3149e4ff88d66e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 439,
"license_type": "no_license",
"max_line_length": 200,
"num_lines": 5,
"path": "/README.md",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "# Functional alignment of somatosensory digit representations using 7T fMRI data\n\nCode accompanying poster # 2197 at OHBM 2020. Authors: Oliver Contier, Dr. Esther Kühn, Prof. Michael Hanke.\nThe poster presents preliminary results of a lab rotation by Oliver Contier at the Research Center Jülich, Institute of Neuroscience and Medicine, Department Brain and Behavior (INM-7) headed by Prof. Simon Eickhoff.\nThe lab rotation was part of the Max Planck School of Cognition.\n"
},
{
"alpha_fraction": 0.585538923740387,
"alphanum_fraction": 0.597813606262207,
"avg_line_length": 33.52980041503906,
"blob_id": "85b513ca03f434deabaa8fb28f0f2011c461451c",
"content_id": "fd13d6ff90119a47623c2c54e781837de360ce8e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5214,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 151,
"path": "/srm/srm_roi.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n\"\"\"\n# TODO: docstring\n\nuse nilearn.masking.unmask to retransform masked voxels into whole-brain space.\n\ntroubleshooting brainiak installation problems:\npython3 -m pip install --no-use-pep517 brainiak\n\"\"\"\n\nimport os\nimport pickle\nimport time\nfrom os.path import join as pjoin\n\nimport numpy as np\nfrom brainiak.funcalign.rsrm import RSRM\nfrom brainiak.funcalign.srm import SRM\nfrom nilearn.image import load_img\nfrom nilearn.masking import apply_mask\nfrom scipy import stats\n\n\ndef grab_subject_ids(ds_dir='/data/BnB_USER/oliver/somato/scratch/dataset',\n testsubs=False,\n exclude_subs=()):\n \"\"\"\n Get list of all subject IDs.\n \"\"\"\n import os\n import glob\n sub_ids = [os.path.basename(subdir) for subdir in glob.glob(ds_dir + '/*')]\n if testsubs:\n sub_ids = sub_ids[:testsubs]\n if exclude_subs:\n # for exclude_sub in exclude_subs:\n # sub_ids.remove(exclude_sub)\n sub_ids = [subid for subid in sub_ids if subid not in exclude_subs]\n print('subject ids :', sub_ids)\n return sub_ids\n\n\ndef datagrabber(roi_glm_workdir='/data/project/somato/scratch/roi_glm/workdirs/',\n prepped_dsdir='/data/project/somato/scratch/dataset',\n testsubs=False,\n excludesubs=()):\n \"\"\"\n # grab file names for\n # filtered bold data and roi masks from roi_glm output\n \"\"\"\n sub_ids = grab_subject_ids(testsubs=testsubs, ds_dir=prepped_dsdir, exclude_subs=excludesubs)\n run1_data, run2_data, run3_data, run4_data, = [], [], [], []\n run1_masks, run2_masks, run3_masks, run4_masks = [], [], [], []\n for sub_id in sub_ids:\n sub_wf_dir = pjoin(roi_glm_workdir, 'subject_ffx_wfs', 'subject_%s_ffx_workdir' % sub_id,\n 'subject_%s_wf' % sub_id)\n # file names for filtered bold files\n for run_idx, rundata in enumerate([run1_data, run2_data, run3_data, run4_data]):\n fname = pjoin(sub_wf_dir, 'bpf', 'mapflow', '_bpf%i' % run_idx, 'data_brain_smooth_filt.nii.gz')\n if not os.path.exists(fname):\n raise IOError('filtered nifti does 
not exist : ', fname)\n rundata.append(fname)\n # file names for masks\n for run_idx, runmasks in enumerate([run1_masks, run2_masks, run3_masks, run4_masks]):\n fname = pjoin(sub_wf_dir, 'binarize_roi', 'mapflow',\n '_binarize_roi%i' % run_idx, 'zfstat1_threshold_maths_maths.nii.gz')\n if not os.path.exists(fname):\n raise IOError('mask file does not exist : ', fname)\n runmasks.append(fname)\n\n return run1_data, run2_data, run3_data, run4_data, run1_masks, run2_masks, run3_masks, run4_masks\n\n\ndef load_data(run1_data, run2_data, run1_masks, run2_masks,\n whichrun=1,\n force_mask_run1=False,\n zscore=True,\n nan2num=True):\n \"\"\"\n Load the masked data for a given run in array form\n to suit brainiak input.\n \"\"\"\n if whichrun == 1:\n run_data, run_masks = run1_data, run1_masks\n elif whichrun == 2:\n run_data, run_masks = run2_data, run2_masks\n else:\n raise IOError('did not recognize argument %s for whichrun' % str(whichrun))\n if force_mask_run1:\n run_masks = run1_masks\n print('loading data')\n if zscore:\n run_arrs = [\n stats.zscore( # load image, apply mask, z-score\n apply_mask(load_img(data), mask_img=mask).T,\n axis=1, ddof=1)\n for data, mask in zip(run_data, run_masks)\n ]\n else:\n run_arrs = [apply_mask(load_img(data), mask_img=mask).T\n for data, mask in zip(run_data, run_masks)]\n if nan2num:\n run_arrs = [np.nan_to_num(bold_array) for bold_array in run_arrs]\n return run_arrs\n\n\ndef train_srm(training_data,\n use_robust_srm=True,\n n_comps=10,\n n_iters=4,\n printruntime=True):\n \"\"\"\n Fit srm on training data\n \"\"\"\n if use_robust_srm:\n srm = RSRM(n_iter=n_iters, features=n_comps)\n else:\n srm = SRM(n_iter=n_iters, features=n_comps)\n # fit\n if printruntime:\n start = time.time()\n srm.fit(training_data)\n if printruntime:\n elapsed = time.time() - start\n print('fitting srm took: ', elapsed)\n return srm\n\n\ndef save_srm_as_pickle(srm_instance,\n robust_srm=True,\n 
pickle_outdir='/home/homeGlobal/oli/somato/scratch/srm_roi'):\n \"\"\"\n \"\"\"\n if robust_srm:\n outpickle = pjoin(pickle_outdir, 'rsrm.p')\n else:\n outpickle = pjoin(pickle_outdir, 'srm.p')\n if not os.path.exists(pickle_outdir):\n os.makedirs(pickle_outdir)\n with open(outpickle, 'wb') as f:\n pickle.dump(srm_instance, f)\n return outpickle\n\n\nif __name__ == '__main__':\n run1_data, run2_data, run3_data, run4_data, \\\n run1_masks, run2_masks, run3_masks, run4_masks = datagrabber(testsubs=3)\n run_arrs = load_data(run1_data, run2_data, run1_masks, run2_masks)\n srm = train_srm(training_data=run_arrs)\n # outpickle = save_srm_as_pickle(srm_instance=srm)\n"
},
{
"alpha_fraction": 0.6431776285171509,
"alphanum_fraction": 0.6613882184028625,
"avg_line_length": 33.135135650634766,
"blob_id": "4c0c14791a122998b01a832a1a1357c18c8f225c",
"content_id": "ce585270ae5be02840b4974c16865f7a9bd84f33",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3789,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 111,
"path": "/ica/CanICA/subject_canica.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n#SBATCH --job-name=subject_canica\n#SBATCH --output=logs/multiprocess_%j.out\n#SBATCH --time=08:00:00\n#SBATCH --nodes=1\n#SBATCH --exclusive\n\n\n\"\"\"\nRun Nilearn's CanICA on subject/condition level and parallelize through slurm for all data.\n\nOnline-Example on interfacing slurm in python script:\nhttps://rcc.uchicago.edu/docs/tutorials/kicp-tutorials/running-jobs.html\n\n# since each cycle lasted 25.6 seconds, that might be a good cutoff for high-pass filtering\n# 1 rep / 25.6 sec = 0.0390625 Hz\n# two cycles = 0.01953125 Hz\n\"\"\"\n\nimport os\nimport glob\nfrom os.path import join as pjoin\n\nimport numpy as np\nfrom nilearn.decomposition import CanICA\nfrom nilearn.image import load_img, mean_img\nfrom nilearn.plotting import plot_prob_atlas\n\n\ndef run_canica_subject(sub_id,\n cond_id='D1_D5',\n ds_dir='/data/BnB_USER/oliver/somato',\n out_basedir='/home/homeGlobal/oli/somato/scratch/ica/CanICA',\n ncomps=50, smoothing=3, caa=True, standard=True, detr=True, highpass=.01953125, tr=2.,\n masktype='epi', ninit=10, seed=42, verb=10):\n \"\"\"\n Run Nilearn's CanICA on a single condition of a single subject.\n \"\"\"\n # load example image\n bold_file = pjoin(ds_dir, sub_id, cond_id, 'data.nii.gz')\n bold_img = load_img(bold_file)\n # paths to output\n out_dir = pjoin(out_basedir, sub_id, cond_id)\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n out_comp_nii = pjoin(out_dir, 'components.nii.gz')\n out_components_arr = pjoin(out_dir, 'components.npy')\n out_png = pjoin(out_dir, 'components_probatlas.png')\n\n # set up ica\n ica = CanICA(n_components=ncomps, smoothing_fwhm=smoothing, do_cca=caa, standardize=standard, detrend=detr,\n mask_strategy=masktype, high_pass=highpass, t_r=tr,\n n_init=ninit, random_state=seed, verbose=verb)\n # more interesting arguments\n # mask_strategy='mni_something, mask_args=see nilearn.masking.compute_epi_mask, threshold=3.\n\n # fit ica\n ica.fit(bold_img)\n\n # save components as 
4d nifti\n components_img = ica.components_img_\n components_img.to_filename(out_comp_nii)\n # plot components as prob atlas and save plot\n g = plot_prob_atlas(components_img, bg_img=mean_img(bold_img))\n g.savefig(out_png, dpi=300)\n # save components as 2d np array\n components_arr = ica.components_\n np.save(out_components_arr, components_arr)\n # save automatically generated epi mask\n if masktype == 'epi':\n mask_img = ica.mask_img_\n out_mask_img = pjoin(out_dir, 'mask_img.nii.gz')\n mask_img.to_filename(out_mask_img)\n\n return ica # return ica object for later use\n\n\ndef parallelize_canica(i, datadir='/data/BnB_USER/oliver/somato'):\n \"\"\"\n Parallelize the function run_canica_subject with slurm (so that only this script has to be executed).\n perform work associated with step i.\n \"\"\"\n\n # list of all subjects and conditions twice (should be 24)\n subject_ids = [os.path.basename(absp) for absp in glob.glob(pjoin(datadir, '*'))] * 2\n if i >= 12:\n cond = 'D5_D1'\n else:\n cond = 'D1_D5'\n run_canica_subject(sub_id=subject_ids[i], cond_id=cond)\n\n\nif __name__ == '__main__':\n import multiprocessing\n import sys\n\n # necessary to add cwd to path when script run by slurm (since it executes a copy)\n sys.path.append(os.getcwd())\n\n # get number of cpus available to job\n try:\n ncpus = int(os.environ[\"SLURM_JOB_CPUS_PER_NODE\"])\n except KeyError:\n ncpus = multiprocessing.cpu_count()\n\n # create pool of ncpus workers\n pool = multiprocessing.Pool(ncpus)\n\n # apply my ica function in parallel\n pool.map(parallelize_canica, range(24))\n"
},
{
"alpha_fraction": 0.5701168775558472,
"alphanum_fraction": 0.5751252174377441,
"avg_line_length": 36.4375,
"blob_id": "da6f13661a12c3a1eb0c204f5e2bd3f9cdd6bea8",
"content_id": "108d6997a6018ae4bddb4877ea86e216f17185a6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1198,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 32,
"path": "/ds_prep/copy_onsets.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport glob\nfrom os.path import join as pjoin\nimport os\nfrom shutil import copyfile\n\n\ndef run_copy_onsets(onsets_orig_dir='/data/project/somato/raw/onsets_from_esther',\n conds=('blocked_design1', 'blocked_design2'),\n ds_prepped_dir='/data/project/somato/scratch/dataset'):\n \"\"\"\n Copy onsets and duration files systematically into the prepped dataset directory.\n \"\"\"\n sub_ids_nosuff = [fullpath.split('/')[-1]\n for fullpath in glob.glob(onsets_orig_dir + '/*')]\n sub_ids = [os.path.basename(subdir)\n for subdir in glob.glob(ds_prepped_dir + '/*')\n if '.txt' not in subdir] # skip readme.txt\n\n for subid_nosuff, subid in zip(sub_ids_nosuff, sub_ids):\n for cond in conds:\n for digit_str in ['D%i' % i for i in range(1, 6)]:\n for ftype in ['.ons', '.dur']:\n source = pjoin(onsets_orig_dir, subid_nosuff, cond, digit_str + ftype)\n dest = pjoin(ds_prepped_dir, subid, cond, digit_str + ftype)\n copyfile(source, dest)\n return None\n\n\nif __name__ == '__main__':\n run_copy_onsets()\n"
},
{
"alpha_fraction": 0.6093876957893372,
"alphanum_fraction": 0.6228682994842529,
"avg_line_length": 46.72868347167969,
"blob_id": "8256078661039ac91ffcf8cb294ae45b835d0496",
"content_id": "fb63a1af7bbc00862e695c7a1a6ed3243083d73a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12314,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 258,
"path": "/srm/digit_classification_knn.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport os\nimport runpy\nimport time\nfrom os.path import join as pjoin\n\nimport numpy as np\nfrom brainiak.funcalign.rsrm import RSRM\nfrom scipy import signal\nfrom scipy.stats import zscore\nfrom sklearn.neighbors import KNeighborsClassifier\n\nfile_globals = runpy.run_path('srm_roi.py')\ndatagrabber = file_globals['datagrabber']\nload_data = file_globals['load_data']\ngrab_subject_ids = file_globals['grab_subject_ids']\n\n\ndef get_digit_indices(n_cycles=20,\n vols_per_digit=2.56):\n \"\"\"\n Produce two boolean arrays, one for each run.\n Each array has shape (n_digits, n_volumes).\n Use these to select samples in our classification task\n \"\"\"\n vols_per_digit_upsampled = int(vols_per_digit * 100)\n digits_run1 = []\n for didx in range(1, 6):\n # create series of 1s for the first finger stimulation\n finger_signal = signal.boxcar(vols_per_digit_upsampled)\n # add zeros before and after accordingly to form first cycle.\n post_padded = np.append(finger_signal, [0] * vols_per_digit_upsampled * (5 - didx))\n first_cycle = np.insert(post_padded, obj=0, values=[0] * vols_per_digit_upsampled * (didx - 1))\n all_cycles = np.tile(first_cycle, n_cycles) # repeat to get all cycles\n # resample to volume space (i.e. 
take every 100th element)\n # and turn into boolean vector\n digit_bool = all_cycles[::100] > 0.01\n digits_run1.append(digit_bool)\n digits_run1 = np.array(digits_run1)\n digits_run2 = np.flip(digits_run1, axis=0)\n return digits_run1, digits_run2\n\n\ndef digit_indices_to_labels(digits_run1, digits_run2):\n \"\"\"\n Turn the boolean arrays of digit indices\n into 1d arrays with values 1-6\n for use with sklearn classifiers.\n \"\"\"\n labels_run1, labels_run2 = np.zeros(shape=256), np.zeros(shape=256)\n for finger_i in range(1, 6):\n labels_run1[digits_run1[finger_i - 1]] = finger_i\n labels_run2[digits_run2[finger_i - 1]] = finger_i\n return labels_run1, labels_run2\n\n\ndef project_data_crossval(run1_arrs,\n run2_arrs,\n nfeatures=10,\n niter=20,\n outdir='/data/BnB_USER/oliver/somato/scratch/crossval_projection'):\n \"\"\"\n Project subjects bold data onto shared response space in a cross-validated way. For every subject, an srm is\n trained on the remaining subjects. this srm is trained on the data from run one, and the data from the test\n subject's other run is than projected onto this template. 
Data is saved in numpy's npy.\n \"\"\"\n # prepare results arrays (for projected data and estimated srm time series)\n # both arrays have shape (nsubs, nruns, nfeatures, nvols)\n projected_data = np.zeros(shape=(len(run1_arrs), 2, nfeatures, run1_arrs[0].shape[1]))\n trained_srms = np.zeros(shape=(len(run1_arrs), 2, nfeatures, run1_arrs[0].shape[1]))\n # iterate over runs used for training / testing\n for trainrun_idx in range(2):\n # select run used for training and test\n training_run_arrs = (run1_arrs, run2_arrs)[trainrun_idx]\n test_run_arrs = (run1_arrs, run2_arrs)[abs(trainrun_idx - 1)]\n # iterate over testsubjects\n for testsub_idx in range(len(training_run_arrs)):\n start = time.time()\n print('starting projection of run %i subject %i' % (trainrun_idx, testsub_idx))\n # select training and test subject\n trainsubs_traindata = [x for i, x in enumerate(training_run_arrs) if i != testsub_idx]\n testsub_traindata = training_run_arrs[testsub_idx]\n srm = RSRM(n_iter=niter, features=nfeatures) # train srm on training subject's training data\n srm.fit(trainsubs_traindata)\n w, s = srm.transform_subject(testsub_traindata) # estimate test subject's bases\n # reattach weight matrix and individual term to srm instance\n # (to allow transforming test run with builtin brainiak function)\n srm.w_.insert(testsub_idx, w)\n srm.s_.insert(testsub_idx, s)\n allsubs_proj, ind_terms = srm.transform(test_run_arrs) # project test run into shared space\n testsub_proj = allsubs_proj[testsub_idx] # select projected data from test subject\n projected_data[testsub_idx, trainrun_idx, :, :] = testsub_proj\n trained_srms[testsub_idx, trainrun_idx, :, :] = srm.r_\n elapsed = time.time() - start\n print('this round took: ', elapsed)\n\n # save results array to file\n if outdir:\n proj_outpath = pjoin(outdir, 'proj_data_nfeats-%i.npy' % nfeatures)\n with open(proj_outpath, 'wb') as outf:\n np.save(outf, projected_data)\n trained_srms_outpath = pjoin(outdir, 'trainedsrms_nfeats-%i.npy' % 
nfeatures)\n with open(trained_srms_outpath, 'wb') as outf:\n np.save(outf, trained_srms)\n print('done!')\n return projected_data, trained_srms\n\n\ndef knn_cross_sub_and_run(projected_data,\n nneighs=5,\n zscore_over_all=True):\n \"\"\"\n Train a KNN classifier on all but one subjects data for a given run, and test on the left-out subject for the other\n run. (cross-run cross-subject predictions).\n Returns array with accuracies of shape (nsubs, nruns).\n (remember, input array projected_data has shape (nsubs, nruns, nfeatures, nvols))\n \"\"\"\n # get digit labels\n digits_run1, digits_run2 = get_digit_indices()\n labels_run1, labels_run2 = digit_indices_to_labels(digits_run1, digits_run2)\n # global zscoring of data if desired\n if zscore_over_all:\n projected_data = zscore(projected_data, axis=3)\n # prepare accuracy array\n nsubs, nruns, nfeatures, nvols = projected_data.shape\n accuracies = np.zeros((nsubs, nruns))\n # iterate over subjects and runs\n for sub_i in range(nsubs):\n for testrun_i in range(2):\n # select training and test data and labels\n test_data = projected_data[sub_i, testrun_i, :, :]\n submask, runmask = np.ones(nsubs, dtype=bool), np.ones(2, dtype=bool)\n submask[sub_i] = False\n runmask[testrun_i] = False\n train_data = projected_data[submask, runmask, :, :]\n train_data = train_data.reshape(nfeatures, nvols * (nsubs-1))\n test_labels = [labels_run1, labels_run2][testrun_i]\n train_labels = np.tile([labels_run1, labels_run2][abs(testrun_i - 1)], (nsubs-1))\n # train classifier and score\n neigh = KNeighborsClassifier(n_neighbors=nneighs)\n neigh.fit(train_data.T, train_labels)\n accuracies[sub_i, testrun_i] = neigh.score(test_data.T, test_labels)\n return accuracies\n\n\ndef knn_within_run(projected_data,\n nneighs=5,\n zscore_over_all=True):\n \"\"\"\n Train KNN on all but one subject and test at left-out subject's data from the same run.\n \"\"\"\n # get digit labels\n digits_run1, digits_run2 = get_digit_indices()\n labels_run1, 
labels_run2 = digit_indices_to_labels(digits_run1, digits_run2)\n # global z-scoring\n if zscore_over_all:\n projected_data = zscore(projected_data, axis=3)\n # prepare accuracy array\n nsubs, nruns, nfeatures, nvols = projected_data.shape\n accuracies = np.zeros((nsubs, nruns))\n for testsub_i in range(nsubs):\n for within_run_i in range(2):\n # select training and test data and labels\n test_data = projected_data[testsub_i, within_run_i, :, :]\n submask = np.ones(nsubs, dtype=bool)\n submask[testsub_i] = False\n train_data = projected_data[submask, within_run_i, :, :]\n train_data = train_data.reshape(nfeatures, nvols * (nsubs-1))\n test_labels = [labels_run1, labels_run2][within_run_i]\n train_labels = np.tile(test_labels, (nsubs-1))\n # train classifier and score\n neigh = KNeighborsClassifier(n_neighbors=nneighs)\n neigh.fit(train_data.T, train_labels)\n accuracies[testsub_i, within_run_i] = neigh.score(test_data.T, test_labels)\n return accuracies\n\n\ndef knn_within_sub(projected_data,\n nneigh=5,\n zscore_over_all=True):\n \"\"\"\n Classify digits within subject, across runs.\n \"\"\"\n digits_run1, digits_run2 = get_digit_indices()\n labels_run1, labels_run2 = digit_indices_to_labels(digits_run1, digits_run2)\n # global z-scoring\n if zscore_over_all:\n projected_data = zscore(projected_data, axis=3)\n # prepare accuracy array\n nsubs, nruns, nfeatures, nvols = projected_data.shape\n results = np.zeros((nsubs, nruns))\n for sub_i in range(nsubs):\n for trainrun_i in range(nruns):\n testrun_i = abs(trainrun_i - 1)\n train_data = projected_data[sub_i, trainrun_i, :, :]\n test_data = projected_data[sub_i, testrun_i, :, :]\n train_labels = [labels_run1, labels_run2][trainrun_i]\n test_labels = [labels_run1, labels_run2][testrun_i]\n neigh = KNeighborsClassifier(n_neighbors=nneigh)\n neigh.fit(train_data.T, train_labels)\n results[sub_i, trainrun_i] = neigh.score(test_data.T, test_labels)\n return results\n\n\ndef classify_over_nfeatures_nneighbors(run1_arrs, 
run2_arrs,\n nfeat_range=(5, 10, 20, 50, 100),\n nneigh_range=tuple(range(3, 101, 2)),\n proj_outdir='/data/BnB_USER/oliver/somato/scratch/crossval_projection',\n knn_outdir='/data/BnB_USER/oliver/somato/scratch/digit_classification_knn'):\n \"\"\"\n Iterate over different values for the number of features allowed in the SRM\n and number of neighbors the KNN classifier considers.\n Save the whole shebang in npz files in given knn_outdir.\n \"\"\"\n if not os.path.exists(knn_outdir):\n os.makedirs(knn_outdir)\n for nfeat in nfeat_range:\n proj_outpath = pjoin(proj_outdir, 'proj_data_nfeats-%i.npy' % nfeat)\n if os.path.exists(proj_outpath):\n with open(proj_outpath, 'rb') as f:\n projected_data = np.load(f)\n else:\n print('starting projection with %i features' % nfeat)\n projected_data, trained_srms = project_data_crossval(run1_arrs, run2_arrs, nfeatures=nfeat,\n outdir=proj_outdir)\n for nneigh in nneigh_range:\n print('and classification with %i neighbors' % nneigh)\n # cross-subject cross-run classification\n crossall_results = knn_cross_sub_and_run(projected_data, nneighs=nneigh)\n out_fname = pjoin(knn_outdir, 'nfeat-%i_nneigh-%i.npz' % (nfeat, nneigh))\n with open(out_fname, 'wb') as f:\n np.save(f, crossall_results)\n # cross-subject within-run classification\n withinrun_results = knn_within_run(projected_data, nneighs=nneigh)\n withinrun_outfname = pjoin(knn_outdir, 'withinrun_nfeat-%i_nneigh-%i.npz' % (nfeat, nneigh))\n with open(withinrun_outfname, 'wb') as f:\n np.save(f, withinrun_results)\n withinsub_results = knn_within_sub(projected_data, nneigh=nneigh)\n withinsub_outfname = pjoin(knn_outdir, 'withinsub_nfeat-%i_nneigh-%i.npz'% (nfeat, nneigh))\n with open(withinsub_outfname, 'wb') as f:\n np.save(f, withinsub_results)\n print('finished nfeats %i nneighs %i' % (nfeat, nneigh))\n return None\n\n\nif __name__ == '__main__':\n\n # load input data\n run1_data, run2_data, run1_masks, run2_masks = datagrabber()\n print('loading run 1 data')\n run1_arrays 
= load_data(run1_data, run2_data, run1_masks, run2_masks, whichrun=1,\n force_mask_run1=True, zscore=True, nan2num=True)\n print('loading run 2 data')\n run2_arrays = load_data(run1_data, run2_data, run1_masks, run2_masks, whichrun=2,\n force_mask_run1=True, zscore=True, nan2num=True)\n print('starting the long journey of classification ...')\n classify_over_nfeatures_nneighbors(run1_arrays, run2_arrays)\n"
},
{
"alpha_fraction": 0.5909899473190308,
"alphanum_fraction": 0.5973128080368042,
"avg_line_length": 34.39160919189453,
"blob_id": "f6938bcf83a21451e3c836124acb5095bea71d20",
"content_id": "dc3358ec198432dfd12624ac943fb5c206f0293f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5061,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 143,
"path": "/srm/srm_bold_data.py",
"repo_name": "oliver-contier/somato_align",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n\"\"\"\nScript to run brainiak's shared response model on our data.\n\nWe'll start with only the first run. Later, we might want to think about concatinating runs in a sensible fashion.\n\nTODO: Generate and apply brain masks with BET.\n\"\"\"\n\nimport glob\nimport os\nimport pickle\nimport time\nfrom os.path import join as pjoin\n\nimport numpy as np\nfrom brainiak.funcalign.rsrm import RSRM\nfrom brainiak.funcalign.srm import SRM\nfrom brainiak.io import load_images\nfrom scipy import stats\n\n\ndef grab_bold_data(cond_id='D1_D5',\n ds_dir='/data/BnB_USER/oliver/somato',\n testsubs=False,\n use_filtered_from_workdir='/home/homeGlobal/oli/somato/scratch/ica/MELODIC/melodic_wf_workdir/'\n 'subject_lvl/somato_melodic_wf/'):\n \"\"\"\n Get file paths for our bold data for a given run.\n If you want to use raw bold-data, set use_filtered_from_workdir to False.\n Else, give working directory of melodic workflow.\n \"\"\"\n sub_ids = [os.path.basename(subdir)\n for subdir in glob.glob(ds_dir + '/*')]\n if testsubs:\n sub_ids = sub_ids[:testsubs]\n if use_filtered_from_workdir:\n boldfiles = [pjoin(use_filtered_from_workdir, 'bpf', 'mapflow', '_bpf%i' % idx, 'data_brain_smooth_filt.nii.gz')\n for idx in range(0, len(sub_ids) * 2, 2)]\n else:\n boldfiles = [pjoin(ds_dir, sub_id, cond_id, 'data.nii.gz')\n for sub_id in sub_ids]\n return boldfiles, sub_ids\n\n\ndef boldfiles_to_arrays(boldfiles,\n z_score=True):\n \"\"\"\n Load bold data into list of arrays (bc that's what brainiak wants)\n \"\"\"\n # create generator that returns nibabel nifti instances\n nibs_gen = load_images(boldfiles)\n # get numpy array data from each those instances (might take a while)\n bold_arrays_list = [np.reshape(nib_instance.get_fdata(), (1327104, 256)) # TODO: don't hard code this\n for nib_instance in nibs_gen]\n if z_score:\n zscored = []\n for bold_array in bold_arrays_list:\n zs = stats.zscore(bold_array, axis=1, ddof=1)\n zs = np.nan_to_num(zs)\n 
zscored.append(zs)\n # TODO: put z-scoring in preprocessing pipeline later\n bold_arrays_list = zscored\n\n return bold_arrays_list\n\n\ndef train_srm(training_data,\n use_robust_srm=True,\n n_comps=50,\n n_iters=20,\n printruntime=True):\n \"\"\"\n Fit srm on training data\n \"\"\"\n\n # TODO: delete after test\n from mpi4py import MPI\n mpicomm = MPI.COMM_WORLD\n\n if use_robust_srm:\n srm = RSRM(n_iter=n_iters, features=n_comps, comm=mpicomm)\n else:\n srm = SRM(n_iter=n_iters, features=n_comps, comm=mpicomm)\n # fit\n if printruntime:\n start = time.time()\n srm.fit(training_data)\n if printruntime:\n elapsed = start - time.time()\n print('fitting srm took: ', elapsed)\n return srm\n\n\ndef save_srm_as_pickle(srm_instance,\n robust_srm=True,\n pickle_outdir='/home/homeGlobal/oli/somato/scratch/srm'):\n \"\"\"\n take fitted srm instance and save as pickle. If robust srm was used, file will be called \"rsrm.p\" instad of \"srm.p\"\n \"\"\"\n if robust_srm:\n outpickle = pjoin(pickle_outdir, 'rsrm.p')\n else:\n outpickle = pjoin(pickle_outdir, 'srm.p')\n if not os.path.exists(pickle_outdir):\n os.makedirs(pickle_outdir)\n with open(outpickle, 'wb') as f:\n pickle.dump(srm_instance, f)\n return outpickle\n\n\ndef run_srm_pipeline(dsdir='/data/BnB_USER/oliver/somato',\n use_filtered_wd='/home/homeGlobal/oli/somato/scratch/ica/MELODIC/melodic_wf_workdir/'\n 'subject_lvl/somato_melodic_wf/',\n whichcond='D1_D5',\n test_subs=False,\n zscore=True,\n robustsrm=True,\n print_runtime=True,\n ncomps=50,\n niters=20,\n pickleoutdir='/home/homeGlobal/oli/somato/scratch/srm'):\n \"\"\"\n SRM Pipeline.\n \"\"\"\n print('grabbing bold data')\n bold_files, subids = grab_bold_data(cond_id=whichcond, ds_dir=dsdir, testsubs=test_subs,\n use_filtered_from_workdir=use_filtered_wd)\n print('converting to np arrays')\n bold_arrays = boldfiles_to_arrays(bold_files, z_score=zscore)\n print('fitting srm')\n fitted_srm = train_srm(bold_arrays, use_robust_srm=robustsrm, n_comps=ncomps, 
n_iters=niters,\n printruntime=print_runtime)\n print('saving to pickle')\n pickle_path = save_srm_as_pickle(fitted_srm, robust_srm=robustsrm, pickle_outdir=pickleoutdir)\n print('done!')\n return None\n\n\nif __name__ == '__main__':\n run_srm_pipeline(test_subs=2, robustsrm=False, ncomps=3, niters=2,\n pickleoutdir='/home/homeGlobal/oli/somato/scratch/srm_mpitest')\n"
}
] | 14 |
lukaku3/pygeon | https://github.com/lukaku3/pygeon | bef5ed1ef66110a8f7c8910f3079365e141e005d | a66c3122e7880908847c9cc1cc9f7d6d938580d0 | 0348e0cafb7e549785c44806a3220c58e4dd02d1 | refs/heads/master | 2020-03-22T00:30:46.137311 | 2019-11-24T14:09:33 | 2019-11-24T14:09:33 | 139,249,435 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6754385828971863,
"alphanum_fraction": 0.6945773363113403,
"avg_line_length": 26.866666793823242,
"blob_id": "8dcf6506600595ceb4068f16b04edfc9c0c0c075",
"content_id": "aff0172859b9d103449a01f177f6ed777cd34f6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1366,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 45,
"path": "/README.md",
"repo_name": "lukaku3/pygeon",
"src_encoding": "UTF-8",
"text": "# pygeon\n\n## install packages\n # yum groupinstall \"Development tools\"\n # yum install gcc zlib-devel bzip2 bzip2-devel readline readline-devel sqlite sqlite-devel openssl openssl-devel git GConf2\n\n## pyenv\n $ git clone https://github.com/yyuu/pyenv.git ~/.pyenv\n $ echo 'export PYENV_ROOT=\"$HOME/.pyenv\"' >> ~/.bashrc\n $ echo 'export PATH=\"$PYENV_ROOT/bin:$PATH\"' >> ~/.bashrc\n $ echo 'eval \"$(pyenv init -)\"' >> ~/.bashrc\n $ source ~/.bashrc\n\n## install python3.6.6\n $ pyenv install 3.6.6\n\n## download selenium, chromedriver(linux)\n selenium-server-standalone-3.13.0.jar\n chromedriver\n\n## start selenium-server\n $ java -Dwebdriver.chrome.driver=chromedriver -jar selenium-server-standalone-3.13.0.jar\n\n## make vitrtualenv dir\n $ virtualenv foobardir\n\n## into vitrtualenv\n $ cd foobardir\n $ pyenv local 3.6.6\n (foobardir) $ source bin/activate\n\n## pip in virtualenv\n (foobardir) $ pip install -r FREEZE.txt\n\n## STEP1\n#### 市区町村データを取得\n (foobardir) $ python make_list.py MakeList.test_make_list\n\n# STEP2\n#### 5市区町村ずつページングしながら詳細リンクのhrefを取得(faxがある場合表示)\n(foobardir) $ python make_list.py MakeList.test_scrape_detail_link\n\n# STEP3\n#### 業者の詳細ページを解析取得\n(foobardir) $ python make_list.py MakeList.test_scrape_agent\n"
},
{
"alpha_fraction": 0.4840300679206848,
"alphanum_fraction": 0.4921969771385193,
"avg_line_length": 39.95033264160156,
"blob_id": "37b9a617eb2d58dc093491e89f0dcc15c3a7d43d",
"content_id": "b246c8f800b9d5375aa0065f0964a9107fb23cdd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12627,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 302,
"path": "/make_list.py",
"repo_name": "lukaku3/pygeon",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport unittest\nimport os\nimport csv\nimport re\nimport sys\nimport logging\nimport requests\nimport pprint\nimport json\nimport time\nimport nose.tools as nose\nfrom PIL import Image\nfrom selenium import webdriver\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.common.desired_capabilities import DesiredCapabilities\nfrom multiprocessing import Pool\nfrom bs4 import BeautifulSoup\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.webdriver.chrome.options import Options\n\nclass MakeList(unittest.TestCase):\n\n selenium_server = 'http://localhost:4444/wd/hub'\n# selenium_server = 'http://192.168.33.1:4444/wd/hub'\n base_url = 'http://www.hatomarksite.com/search/zentaku/agent/area/#!pref=%s'\n detail_url = 'http://www.hatomarksite.com%s'\n dialog_url = 'http://www.hatomarksite.com/search/zentaku/agent/area/dialog/syz?pref='\n# pref_list = {'13':'東京','12':'千葉','11':'埼玉','14':'神奈川','27':'大阪'}\n pref_list = {'11':'埼玉','12':'千葉','14':'神奈川','27':'大阪','13':'東京'}\n default_log = 'test.log'\n pref_json = 'pref%s.json'\n tmp_pref_csv = 'tmp%s.csv'\n agent_csv = 'agents%s.csv'\n city_max = 5\n css_search_btn = 'body > div.dialogBody > div.box.align-center.clearfix.PIE > button.button.button-bordered.button-royal.PIE'\n css_next_link = 'div.pagination.align-center > ol > li.next > a'\n# next_a1 = '#container > div.main.right > div:nth-child(2) > div:nth-child(2) > div > ol > li.next > a'\n# css_next_link = '#container > div.main.right > div:nth-child(7) > div:nth-child(1) > div > ol > li.next > a'\n x = 1024\n y = 768\n browser_path = '/usr/bin/google-chrome'\n\n def setUp(self):\n self.get_pref_list()\n#----- launch visible \n# self.driver = webdriver.Remote(\n# command_executor= self.selenium_server,\n# 
desired_capabilities=DesiredCapabilities.CHROME)\n#----- launch background start\n opts = Options()\n opts.binary_location = self.browser_path\n opts.add_argument('--headless')\n opts.add_argument('--disable-gpu')\n opts.add_argument('--no-sandbox')\n self.driver = webdriver.Chrome(executable_path=os.path.abspath(\"chromedriver\"), chrome_options=opts)\n#----end\n self.driver.set_window_size(self.x, self.y)\n self.driver.implicitly_wait(10)\n\n def setup_logger(self,filepath):\n if filepath is None:\n filepath = self.default_log\n self.logging = logging.getLogger('LoggingTest')\n self.logging.setLevel(10)\n fh = logging.FileHandler(filepath, delay=True, encoding='utf-8')\n# formatter = logging.Formatter('%(asctime)s - '\n# '%(levelname)s - '\n# '%(filename)s:%(lineno)d - '\n# '%(funcName)s - '\n# '%(message)s')\n# fh.setFormatter(formatter)\n self.logging.addHandler(fh)\n\n def get_pref_list(self):\n args = sys.argv[2:]\n new_list = {}\n if len(args):\n for arg in args:\n if arg in self.pref_list.keys():\n new_list[arg] = arg\n self.pref_list = new_list\n\n def test_make_list(self):\n print('start make pref.json')\n driver = self.driver\n pref_list_json = []\n self.setup_logger(self.pref_json)\n for pref in self.pref_list.keys():\n f = open(self.pref_json % pref,'a')\n driver.get(self.base_url % pref ) # 最初、のページを開く\n pref_json = {}\n pref_json['id'] = pref\n pref_json['name'] = self.pref_list[pref]\n pref_json['city'] = []\n req = requests.get(self.dialog_url + pref) # 各dialogのページを開く\n soup = BeautifulSoup(req.text, \"lxml\")\n for i in soup.find_all(\"dt\"): # 全inputをリストへ格納\n if ( pref == \"14\" and re.match(r'(横浜|川崎)', i.find('label').string)) is None: # 神奈川は川崎と横浜\n continue\n # jsonデータ準備\n city = {}\n city['id'] = i.find(\"input\").get('id')\n# city['value'] = i.find(\"input\").get('value')\n# city['name'] = re.sub( r'\\(|\\)|[0-9]+','', i.find('label').string)\n# city['count'] = re.sub(r'\\D', '', i.find('label').string)\n pref_json['city'].append(city)\n\n 
time.sleep(1)\n json.dump(pref_json, f)\n f.close()\n\n\n def test_switch_window(self):\n driver = self.driver\n driver.get( 'https://www.yahoo.com' )\n # set 'python' to #uh-search-box\n driver.find_element_by_name('p').send_keys('Selenium Python3')\n driver.find_element_by_name('p').send_keys(Keys.ENTER)\n #driver.send_keys(Keys.ENTER)\n time.sleep(2)\n driver.find_element_by_css_selector('#web > ol > li:nth-child(1) > div > div > h3 > a').click()\n pprint.pprint( driver.title )\n driver.switch_to.window( driver.window_handles[1] ) # go next window\n pprint.pprint( driver.title )\n driver.switch_to.window( driver.window_handles[0] ) # go back window\n pprint.pprint( driver.title )\n\n def test_scrape_detail_link(self):\n driver = self.driver\n # self.setup_logger(None)\n for pref in self.pref_list.keys():\n fpath = self.pref_json % pref\n if os.path.exists(fpath): # 県別 市区町村を開く\n tmp_f = open(self.tmp_pref_csv % pref,'a')\n self.tmp_f = None\n self.tmp_f = writer = csv.writer(tmp_f)\n with open(fpath, \"r\") as f:\n pref = json.load(f)\n# for url in pref_json:\n# driver.get( self.base_url % pref['id'] )\n time.sleep(2)\n city_cnt = 0 # 最大5つまでチェック(市区町村)\n city_list = []\n for city in pref['city']:\n driver.get( self.base_url % pref['id'] )\n city_list.append(city['id'])\n if len(city_list) == self.city_max:\n print(city_list)\n time.sleep(1)\n self.click_city(city_list)\n self.collect_link()\n print('init city_list')\n city_list = []\n else:\n pass\n if len(city_list) > 0:\n print(city_list)\n self.click_city(city_list)\n self.collect_link()\n city_list = []\n\n tmp_f.close()\n else:\n print('%s is not exists.' 
% fpath)\n# tmp_f.close()\n else:\n pass # for pref in self.pref_list.keys()\n print('end loop')\n\n# fpath = self.pref_json # 県別 市区町村を読む\n# if os.path.exists(fpath):\n# with open(fpath, \"r\") as f:\n# pref_json = json.load(f)\n# for url in pref_json:\n# # self.setup_logger(self.tmp_pref_csv % url['id'])\n# f = open(self.tmp_pref_csv % pref,'a')\n# driver.get( self.base_url % url['id'] )\n# time.sleep(2)\n# city_cnt = 0 # 最大5つまでチェック(市区町村)\n# city_list = []\n# city_idx = 0\n# for city in url['city']:\n# driver.get( self.base_url % url['id'] )\n# print(\"append:\" + city['id'])\n# city_list.append(city['id'])\n# if len(city_list) == self.city_max:\n# self.click_city(city_list)\n# time.sleep(1)\n# print('init city_list')\n# self.collect_link(f)\n# city_list = []\n# city_idx += 1\n# if len(city_list) > 0:\n# self.click_city(city_list)\n# self.collect_link(f)\n# city_list = []\n# f.close()\n# else:\n# print('%s is not exists.' % fpath)\n# pass\n\n def test_scrape_agent(self):\n print('start scrape fax')\n driver = self.driver\n # self.setup_logger(None)\n for pref in self.pref_list:\n driver.get( self.base_url % pref )\n self.setup_logger(self.agent_csv % pref)\n csv_path = self.tmp_pref_csv % pref\n if os.path.exists(csv_path):\n with open(csv_path, newline='') as f:\n dataReader = csv.reader(f)\n for row in dataReader:\n print(row[0])\n driver.get( self.detail_url % row[0] )\n time.sleep(1)\n soup = BeautifulSoup(driver.page_source, \"lxml\")\n try:\n tbl = soup.find('table').find_all('td')\n detail = []\n detail.append(tbl[0])\n detail.append(tbl[1])\n detail.append(tbl[2])\n detail.append(tbl[7])\n detail.append(tbl[8])\n detail_str = \",\".join(map(str,detail))\n self.logging.info( re.sub( r'<((/|)td|td\\scolspan=\"[0-9]\")>', '', detail_str) )\n except:\n print(\"%s is not exists.\" % row[0])\n pass\n else:\n f.close()\n else:\n print('%s is not exists.' 
% csv_path)\n pass\n\n\n def click_city(self, city_list):\n driver = self.driver\n iframe = driver.find_element_by_css_selector('#fancybox-frame')\n driver.switch_to.frame(iframe)\n for c in city_list:\n driver.find_element_by_id(c).click() # 市区町村をclick\n\n# driver.save_screenshot('screenshots/1.png')\n driver.find_element_by_css_selector(self.css_search_btn).click() # 検索btnをクリック\n time.sleep(1)\n Select(driver.find_element_by_css_selector('select.displayCount')).select_by_value('50')\n driver.switch_to.default_content()\n\n\n def collect_link(self):\n driver = self.driver\n# logging = self.logging\n# driver.switch_to_default_content()\n time.sleep(1)\n soup = BeautifulSoup(driver.page_source, \"lxml\")\n tbl = soup.find_all('table')\n for t in tbl:\n url = []\n if t.find('a').string is not None:\n url.append(t.find('a').get('href'))\n url.append(t.find('a').string)\n# print(','.join(url))\n# f.writerows( url )\n self.tmp_f.writerow( url )\n# self.logging.info( ','.join(url) )\n# url.append(t.find('a').get('href'))\n# if t.find('a').string:\n# url.append(t.find('a').string)\n# self.logging.info( ','.join(url) )\n\n else:\n paginate = soup.select(self.css_next_link)\n print(paginate)\n if len(paginate) > 0:\n time.sleep(1)\n# element.click()\n# driver.find_element_by_css_selector(self.css_next_link).click()\n try:\n #driver.find_element_by_css_selector(self.css_next_link)\n element = WebDriverWait(driver, 10).until(\n EC.element_to_be_clickable((By.CSS_SELECTOR, self.css_next_link)))\n element.click()\n except OSError as err:\n driver.save_screenshot('screenshots/paginate_err.png')\n print(\"OS error: {0}\".format(err))\n self.collect_link()\n else:\n print('next-link is not exists.')\n pass\n\n def tearDown(self):\n self.driver.close()\n pass\n\nif __name__ == \"__main__\":\n unittest.main()\n"
},
{
"alpha_fraction": 0.621052622795105,
"alphanum_fraction": 0.621052622795105,
"avg_line_length": 30,
"blob_id": "c6597b017a867de80008f5efb2c8d6f1ffb1a060",
"content_id": "326ca96007815d1c4c079981eedcc9c0a5ad97ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 95,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 3,
"path": "/tests/sub.py",
"repo_name": "lukaku3/pygeon",
"src_encoding": "UTF-8",
"text": "\nclass subClass():\n def display(self, driver):\n print(\"this is subClass.display()\")\n\n"
},
{
"alpha_fraction": 0.679496169090271,
"alphanum_fraction": 0.6913926005363464,
"avg_line_length": 35.64102554321289,
"blob_id": "4db5f010e8f71f59f694ecc9248d8bbb37f46202",
"content_id": "ffeb9f651ec28226d8b817139dbd853258a34869",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1429,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 39,
"path": "/MyCase.py",
"repo_name": "lukaku3/pygeon",
"src_encoding": "UTF-8",
"text": "import unittest, os\nfrom selenium import webdriver\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.common.desired_capabilities import DesiredCapabilities\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.webdriver.chrome.options import Options\nfrom sub import subClass\n\nclass MyTestCase(unittest.TestCase):\n selenium_server = 'http://localhost:4444/wd/hub'\n # selenium_server = 'http://192.168.33.1:4444/wd/hub'\n\n def setUp(self):\n opts = Options()\n # opts.binary_location = self.browser_path\n # opts.add_argument('--headless')\n opts.add_argument('--disable-gpu')\n opts.add_argument('--no-sandbox')\n # self.driver = webdriver.Chrome(executable_path=os.path.abspath(\"chromedriver\"), chrome_options=opts)\n # ----- launch remote start\n self.driver = webdriver.Remote(\n # command_executor= self.selenium_server,\n desired_capabilities=DesiredCapabilities.CHROME,\n options=opts\n )\n # ----- launch remote end\n\n def test_something(self):\n subClass.display(self, self.driver)\n self.assertEqual(True, True)\n\n def tearDown(self):\n self.driver.close()\n\nif __name__ == '__main__':\n unittest.main()\n"
}
] | 4 |
wang264/InterviewPrep | https://github.com/wang264/InterviewPrep | d923d744bec16b092dca51b48ab59c14fb9255ad | 43c32a8006cb2c3f7bcabfd1bf795101b9043616 | fc11f09d2f03bb1b543a658ece2939da71192099 | refs/heads/master | 2023-07-27T15:02:05.111828 | 2021-09-13T12:06:42 | 2021-09-13T12:06:42 | 405,179,981 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.632949709892273,
"alphanum_fraction": 0.6541489958763123,
"avg_line_length": 22.253520965576172,
"blob_id": "262b22042c7ab11d3f063ad9643059b233ce3447",
"content_id": "1fecf6b6ae7daabc43be98be5b1903e3388f3845",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1655,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 71,
"path": "/iterator_and_generator.py",
"repo_name": "wang264/InterviewPrep",
"src_encoding": "UTF-8",
"text": "# https://www.w3schools.com/python/python_iterators.asp\n# https://www.geeksforgeeks.org/difference-between-iterator-vs-generator/\n\n\n# WHY we need them. save RAM/Memory. We dont need to store them in advance, only generate them when we need it.\n\n# An iterator is an object that can be iterated upon, meaning that you can traverse through all the values.\n#\n# Technically, in Python, an iterator is an object which implements the iterator protocol,\n# which consist of the methods __iter__() and __next__().\n\nclass MyNumbers:\n def __init__(self, start=1, max_size=10, step=1):\n self.max_size = max_size\n self.val = start\n self.step = step\n\n def __iter__(self):\n return self\n\n def __next__(self):\n x = self.val\n self.val += self.step\n if x > self.max_size:\n raise StopIteration\n else:\n return x\n\n\nmyclass = MyNumbers()\nmyiter = iter(myclass)\n\nprint(next(myiter))\nprint(next(myiter))\n\nfor idx, numb in enumerate(MyNumbers(start=1, max_size=100000000, step=2)):\n print(idx)\n\na = [x for x in range(1000000000) if x % 2 == 1]\n\n\n# generator\n# It is another way of creating iterators in a simple way\n# where it uses the keyword “yield” instead of returning\n# it in a defined function.\n\ndef sq_numbers(n):\n for i in range(1, n + 1):\n yield i * i\n\n\na = sq_numbers(3)\n\nprint(\"The square of numbers 1,2,3 are : \")\nprint(next(a))\nprint(next(a))\nprint(next(a))\n\nfor i in sq_numbers(6):\n print(i)\n\nmytuple = (\"apple\", \"banana\", \"cherry\")\nmyit = iter(mytuple)\nprint(myit)\n\nprint(next(myit))\nprint(next(myit))\nprint(next(myit))\n\nfor fruit in mytuple:\n print(fruit)\n"
},
{
"alpha_fraction": 0.6392405033111572,
"alphanum_fraction": 0.6909282803535461,
"avg_line_length": 20.0222225189209,
"blob_id": "7f26daa6604d6f792f620c5c92abb4c2bd13c567",
"content_id": "cae770048bca2669cc1f261546ee6d1bb8a7875a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 948,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 45,
"path": "/deep_copy_shallow_copy.py",
"repo_name": "wang264/InterviewPrep",
"src_encoding": "UTF-8",
"text": "# https://www.geeksforgeeks.org/copy-python-deep-copy-shallow-copy/\n\n\n# importing copy module\nimport copy\n\n# initializing list 1\nli1 = [1, 2, [3, 5], 4]\n\n# if we create a reference\nreference = li1\n\n# using copy for shallow copy\nli2 = copy.copy(li1)\n\n# using deepcopy for deepcopy\nli3 = copy.deepcopy(li1)\n\nid(reference)\nid(li1)\nid(reference) == id(li1)\n# we can see that 'reference' and 'li1'actually point to the same list\n\n# how about li1 and li2\nid(li1)\nid(li2)\nid(li1) == id(li2)\n# hmm, so li1 and li2 is different list.\n# however, notice that they are list of list, the third element is a list.\nid(li1[2]) == id(li2[2])\n# they are actually the same list.\nli1[2].append(4)\nli2[2]\n\n\n# how about li1 and li3\nid(li1)\nid(li3)\nid(li1) == id(li3)\n# hmm, so li1 and li3 is different list.\n# however, notice that they are list of list, the third element is a list.\nid(li1[2]) == id(li3[2])\n# they are actually different list.\nli1[2].append(4)\nli3[2]\n\n\n"
},
{
"alpha_fraction": 0.5773809552192688,
"alphanum_fraction": 0.5952380895614624,
"avg_line_length": 17.66666603088379,
"blob_id": "b77998248c2d893e246dbd6eb1ec9be7ccc2d386",
"content_id": "6d8bf423edd3bda9a569d2e8b1077efb8dd395fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 168,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 9,
"path": "/object.py",
"repo_name": "wang264/InterviewPrep",
"src_encoding": "UTF-8",
"text": "class ListNode:\n def __init__(self, val, next=None):\n self.val = val\n self.next = next\n\n\nnode = ListNode(10, next=None)\n\nprev = ListNode(0, next=node)\n"
},
{
"alpha_fraction": 0.6346604228019714,
"alphanum_fraction": 0.6346604228019714,
"avg_line_length": 21.473684310913086,
"blob_id": "0e695594db5afaf082733653016cde4ab0f6d2cc",
"content_id": "0bc4327573e4dcf40386910d3b84e60d704e0450",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 427,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 19,
"path": "/with_statement.py",
"repo_name": "wang264/InterviewPrep",
"src_encoding": "UTF-8",
"text": "# https://www.geeksforgeeks.org/with-statement-in-python/\n\n\nclass MessageWriter(object):\n def __init__(self, file_name):\n self.file_name = file_name\n\n def __enter__(self):\n self.file = open(self.file_name, 'w')\n return self.file\n\n def __exit__(self):\n self.file.close()\n\n\n# using with statement with MessageWriter\n\nwith MessageWriter('my_file.txt') as xfile:\n xfile.write('hello world')\n"
},
{
"alpha_fraction": 0.6560776829719543,
"alphanum_fraction": 0.6576529145240784,
"avg_line_length": 22.664596557617188,
"blob_id": "64a45d6c2e0cb5f2f0698567bf000d9cb5ba177f",
"content_id": "deb4db17b2b44a519ab332c9f34a8484bcb57249",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3825,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 161,
"path": "/decorators.py",
"repo_name": "wang264/InterviewPrep",
"src_encoding": "UTF-8",
"text": "# https://realpython.com/primer-on-python-decorators/\n\n\n# Functions are First-Class Objects\n# In Python, functions are first-class objects. This means that functions can be passed around and\n# used as arguments, just like any other object (string, int, float, list, and so on).\n# Consider the following three functions:\n\ndef say_hello(name):\n return f\"Hello {name}\"\n\n\ndef be_awesome(name):\n return f\"Yo {name}, together we are the awesomest!\"\n\n\ndef greet_bob(greeter_func):\n return greeter_func(\"Bob\")\n\n\ngreet_bob(say_hello)\n\ngreet_bob(be_awesome)\n\n\n# Inner Functions\n# It’s possible to define functions inside other functions. Such functions are called inner functions.\n# Here’s an example of a function with two inner functions:\n\ndef parent():\n print(\"Printing from the parent() function\")\n\n def first_child():\n print(\"Printing from the first_child() function\")\n\n def second_child():\n print(\"Printing from the second_child() function\")\n\n second_child()\n first_child()\n\n\nparent()\n\n\n#\n# Returning Functions From Functions\n# Python also allows you to use functions as return values. The following example returns one of the inner\n# functions from the outer parent() function:\n\ndef parent(num):\n def first_child():\n return \"Hi, I am Emma\"\n\n def second_child():\n return \"Call me Liam\"\n\n if num == 1:\n return first_child\n else:\n return second_child\n\n\nfirst = parent(1)\nsecond = parent(2)\n\nfirst\n\nsecond\n\n\n# Simple Decorators\n# Now that you’ve seen that functions are just like any other object in Python, you’re ready to move on and see\n# the magical beast that is the Python decorator. 
Let’s start with an example:\n\ndef my_decorator(func):\n def wrapper():\n print(\"Something is happening before the function is called.\")\n func()\n print(\"Something is happening after the function is called.\")\n\n return wrapper\n\n\ndef say_whee():\n print(\"Whee!\")\n\n\nsay_whee_before_after = my_decorator(say_whee)\n\nsay_whee()\nsay_whee_before_after()\n\nsay_whee\nsay_whee_before_after\n\n##########################################################################################\n# Put simply: decorators wrap a function, modifying its behavior.\n###########################################################################################\n# Before moving on, let’s have a look at a second example. Because wrapper() is a regular Python function, the\n# way a decorator modifies a function can change dynamically. So as not to disturb your neighbors, the following\n# example will only run the decorated code during the day:\n\nfrom datetime import datetime\n\n\ndef not_during_the_night(func):\n def wrapper():\n if 7 <= datetime.now().hour < 22:\n func()\n else:\n pass # Hush, the neighbors are asleep\n\n return wrapper\n\n\ndef say_whee():\n print(\"Whee!\")\n\n\nsay_whee = not_during_the_night(say_whee)\n\nsay_whee()\n\n\n#\n# Syntactic Sugar!\n# The way you decorated say_whee() above is a little clunky. First of all, you end up typing the name say_whee\n# three times. In addition, the decoration gets a bit hidden away below the definition of the function.\n#\n# Instead, Python allows you to use decorators in a simpler way with the @ symbol, sometimes called the “pie”\n# syntax. 
The following example does the exact same thing as the first decorator example:\n\ndef my_decorator(func):\n def wrapper():\n print(\"Something is happening before the function is called.\")\n func()\n print(\"Something is happening after the function is called.\")\n\n return wrapper\n\n\n@my_decorator\ndef say_whee():\n print(\"Whee!\")\n\nsay_whee()\n\n\ndef do_twice(func):\n def wrapper_do_twice():\n func()\n func()\n return wrapper_do_twice\n\n@do_twice\ndef say_whee():\n print(\"Whee!\")\n\n\nsay_whee()"
},
{
"alpha_fraction": 0.8666666746139526,
"alphanum_fraction": 0.8666666746139526,
"avg_line_length": 14,
"blob_id": "b9003c102768f4d9487f4f7f46d21104fa59b439",
"content_id": "f1c8bda3a46fc20fab77b473dee268a4ea3f7819",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 30,
"license_type": "no_license",
"max_line_length": 15,
"num_lines": 2,
"path": "/README.md",
"repo_name": "wang264/InterviewPrep",
"src_encoding": "UTF-8",
"text": "# InterviewPrep\nInterviewPrep\n"
},
{
"alpha_fraction": 0.6867846250534058,
"alphanum_fraction": 0.7013527750968933,
"avg_line_length": 24.236841201782227,
"blob_id": "080b9ec6971e3bab607c03add5761f3d89117be5",
"content_id": "625c53476a6a800a336a19bc8b7fe1111f6baa1d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 963,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 38,
"path": "/list_as_default_argument_of_function.py",
"repo_name": "wang264/InterviewPrep",
"src_encoding": "UTF-8",
"text": "# In Python, is list a good choice for default argument for a function? Why\n# NO\n#\n# https://docs.python-guide.org/writing/gotchas/\n\ndef append_to(element, to=[]):\n to.append(element)\n return to\n\n\nmy_list = append_to(12)\nprint(my_list)\n\nmy_other_list = append_to(42)\nprint(my_other_list)\n\n\n# what you expect\n# [12]\n# [42]\n\n# #what actually happen\n# [12]\n# [12, 42]\n\n# Python’s default arguments are evaluated once when the function is defined, not each time the function is called\n# (like it is in say, Ruby). This means that if you use a mutable default argument and mutate it, you will and have\n# mutated that object for all future calls to the function as well.\n\n# What You Should Do Instead\n# Create a new object each time the function is called, by using a default arg to signal that no argument was\n# provided (None is often a good choice).\n\ndef append_to(element, to=None):\n if to is None:\n to = []\n to.append(element)\n return to\n\n\n"
},
{
"alpha_fraction": 0.7240674495697021,
"alphanum_fraction": 0.733265221118927,
"avg_line_length": 27.77941131591797,
"blob_id": "c2821b26182243a8fa5946eb15f59f406bdaa55d",
"content_id": "98d240939c94df42434411c86bc5261775ae1aa3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1964,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 68,
"path": "/late_binding_closure.py",
"repo_name": "wang264/InterviewPrep",
"src_encoding": "UTF-8",
"text": "# What You Wrote¶\ndef create_multipliers():\n return [lambda x: i * x for i in range(5)]\n\n\n# What You Might Have Expected to Happen\nfor multiplier in create_multipliers():\n print(multiplier(2))\n\n\n#\n# A list containing five functions that each have their own closed-over i variable that multiplies their argument,\n# producing:\n\n# 0\n# 2\n# 4\n# 6\n# 8\n\n# What Actually Happens\n# 8\n# 8\n# 8\n# 8\n# 8\n# Five functions are created; instead all of them just multiply x by 4.\n\n# Python’s closures are late binding. This means that the values of variables used in closures are looked up at the\n# time the inner function is called. Here, whenever any of the returned functions are called, the value of i is\n# looked up in the surrounding scope at call time. By then, the loop has completed and i is left with its final\n# value of 4. What’s particularly nasty about this gotcha is the seemingly prevalent misinformation that this has\n# something to do with lambdas in Python. Functions created with a lambda expression are in no way special, and\n# in fact the same exact behavior is exhibited by just using an ordinary def:\n\ndef create_multipliers():\n multipliers = []\n\n for i in range(5):\n def multiplier(x):\n return i * x\n\n multipliers.append(multiplier)\n\n return multipliers\n\n\nfor multiplier in create_multipliers():\n print(multiplier(2))\n\n\n# What You Should Do Instead\n# The most general solution is arguably a bit of a hack. Due to Python’s aforementioned behavior\n# concerning evaluating default arguments to functions (see Mutable Default Arguments), you can create a\n# closure that binds immediately to its arguments by using a default arg like so:\n\ndef create_multipliers():\n return [lambda x, i=i: i * x for i in range(5)]\n\n\n# Alternatively, you can use the functools.partial function:\n\nfrom functools import partial\nfrom operator import mul\n\n\ndef create_multipliers():\n return [partial(mul, i) for i in range(5)]\n"
}
] | 8 |
Smartboysai003/funcions | https://github.com/Smartboysai003/funcions | 628086c7778bbfe4067cb7b54982b16a9dfc63df | 1ca1697f477996fa38fcc1639019937f5c1c1c1d | d3c89ba61c2fa34a4c768b92c5971733fb875537 | refs/heads/main | 2023-06-18T07:29:38.019933 | 2021-07-22T12:07:07 | 2021-07-22T12:07:07 | 386,611,779 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4885844886302948,
"alphanum_fraction": 0.5068492889404297,
"avg_line_length": 16.25,
"blob_id": "24f1ab053c5a07bcd2613b23f96bdb21f8bfeb7e",
"content_id": "e6d222de21a9eb00b6b61f57bfde300d92e4fb33",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 219,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 12,
"path": "/prime number.py",
"repo_name": "Smartboysai003/funcions",
"src_encoding": "UTF-8",
"text": "#prime\r\ndef is_prime(n):\r\n for i in range(2,n//2+1):\r\n if n%i==0:\r\n return False\r\n return True\r\nn=int(input())\r\nis_prime(n)\r\nif (is_prime(n)):\r\n print(\"prime\")\r\nelse:\r\n print(\"not prime\")\r\n"
},
{
"alpha_fraction": 0.4861111044883728,
"alphanum_fraction": 0.5069444179534912,
"avg_line_length": 22,
"blob_id": "2ed3d39d945709078d8b92a29e550004e1908ee0",
"content_id": "bb86ab8fa52567ff6a20730a7bc77bd46b4fcf21",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 288,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 12,
"path": "/prime numbers range for given two numbers.py",
"repo_name": "Smartboysai003/funcions",
"src_encoding": "UTF-8",
"text": "#prime range for given two numbers\r\ndef is_prime(n):\r\n if n==1:\r\n return False\r\n for i in range(2,n//2+1):\r\n if (n%i==0):\r\n return False\r\n return True\r\na,b=map(int,input().split())\r\nfor i in range(a,b+1):\r\n if(is_prime(i)):\r\n print(i,end=\" \")\r\n"
},
{
"alpha_fraction": 0.47147336602211,
"alphanum_fraction": 0.48087775707244873,
"avg_line_length": 27.351852416992188,
"blob_id": "c1c1795df0db5bd51fb5d916003a285d68bf2d34",
"content_id": "5e9315d351bd4a208ac593f01dbe784cbdaaa43a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1595,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 54,
"path": "/Happy Numbers using Functions.py",
"repo_name": "Smartboysai003/funcions",
"src_encoding": "UTF-8",
"text": "# Function for happy number\r\ndef is_happy(n): # Return True if the given number is happy else it will return False\r\n while n!=1 and n!=4:\r\n ss = 0\r\n while n:\r\n r = n%10\r\n ss += r*r\r\n n = n//10\r\n\r\n n = ss\r\n if(n==1):\r\n return True\r\n else:\r\n return False\r\n \r\n\r\nwhile(True):\r\n # Input from user\r\n x = input('Check or Range?:')\r\n \r\n # If user enters Check\r\n # Ask to give a number to check\r\n if(x == 'Check'):\r\n num = int(input('Enter a number to check:')) #12\r\n # if num is happy\r\n # print Happy number\r\n if(is_happy(num)): # Function call 1 is_happy(10)\r\n print('Happy')\r\n # else\r\n # print unhappy number\r\n else:\r\n print('Unhappy')\r\n\r\n # If user enters Range as a choice\r\n # Ask user to enter two numbers\r\n elif(x == 'Range'):\r\n a, b = map(int, input('Enter two numbers:').split())\r\n # Ask user if he wants happy series or unhappy series\r\n choice = input('Happy or Unhappy:')\r\n # If choice is happy\r\n # Print all the happy numbers in the given range\r\n if(choice == 'Happy'):\r\n for i in range(a,b+1):\r\n if(is_happy(i)):\r\n print(i,end = ' ')\r\n # if the choice is unhappy\r\n # Print all the unhappy numbers in the given range\r\n else:\r\n for i in range(a,b+1):\r\n if(not is_happy(i)):\r\n print(i,end=' ')\r\n else:\r\n print('Invalid Input')\r\n break\r\n \r\n"
},
{
"alpha_fraction": 0.40689656138420105,
"alphanum_fraction": 0.43448275327682495,
"avg_line_length": 12.5,
"blob_id": "a7f2d3d64cbd28c7f1f1fd47e0fcfb4bf5abfe5e",
"content_id": "9285f979f3cf0f2737a20a2278936b49112087fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 145,
"license_type": "no_license",
"max_line_length": 17,
"num_lines": 10,
"path": "/fibonacci series.py",
"repo_name": "Smartboysai003/funcions",
"src_encoding": "UTF-8",
"text": "#n th fib number\r\ndef fib(n):\r\n a=0\r\n b=1\r\n while(n-1):\r\n a,b=b,a+b\r\n n=n-1\r\n return a\r\nn=int(input())\r\nprint(fib(n))\r\n"
}
] | 4 |
BDAthlon/2017-Triple_Helix-2 | https://github.com/BDAthlon/2017-Triple_Helix-2 | d22a75bd85c88ffb8cb83678f73ebead1c9df077 | ebd2bef8f383e0d163e198e60ff19d318b5205d0 | 18a244f9fc6b382a80f1bb94a995b04b45a1ccc0 | refs/heads/master | 2021-01-15T12:37:37.616497 | 2017-08-17T12:37:31 | 2017-08-17T12:37:31 | 99,653,383 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6734926104545593,
"alphanum_fraction": 0.7127417325973511,
"avg_line_length": 29.39285659790039,
"blob_id": "7fdf5c282a7c7aceae405ef6a26373fd325b849b",
"content_id": "55208ea6c2fe723aebbac96ba5f81efdf11c9123",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1758,
"license_type": "no_license",
"max_line_length": 173,
"num_lines": 56,
"path": "/BioBlobs/game.py",
"repo_name": "BDAthlon/2017-Triple_Helix-2",
"src_encoding": "UTF-8",
"text": "from flask import Flask\r\nfrom flask import render_template\r\nfrom flask import request\r\nimport os\r\n\r\nimport createblobs as createblobs\r\nimport score as score\r\nimport playgame as playgame\r\n\r\napp = Flask(\"BioBlobs\") #this is defininf our flak app\r\n\r\[email protected](\"/\")\r\ndef hello():\r\n\tblobs = createblobs.createblobs(0)\r\n\tblobxy = zip(*blobs)\r\n\tprint 'Random blob coordinates: ' + str(blobxy)\r\n\ttrace1x= blobxy[0]\r\n\ttrace1y= blobxy[1]\r\n\ttrace2x=[]\r\n\ttrace2y=[]\r\n\ttrace3x=[]\r\n\ttrace3y=[]\r\n\ttrace4x=[]\r\n\ttrace4y=[]\r\n\treturn render_template(\"hello.html\", trace1x=trace1x, trace1y=trace1y, trace2x=trace2x, trace2y=trace2y, trace3x=trace3x, trace3y=trace3y, trace4x=trace4x, trace4y=trace4y)\r\n\r\n\r\[email protected](\"/submit\", methods=[\"POST\"])\r\ndef submittted():\r\n\tform_data = request.form\r\n\tprint 'form submitted, data in form:'\r\n\tprint form_data\r\n\r\n\tstochastic = (form_data['stochastic'] == \"true\")\r\n\t\r\n\tscores = score.score()\r\n\tsimvalues = playgame.playgame(os.getcwd(), stochastic, float(form_data['param1']), float(form_data['param2']))\r\n\tprint 'scores:' + str(scores)\r\n\tprint 'simvalues:' + str(simvalues)\r\n\ttry:\r\n\t\ttrace2x= score.convert_to_integers(scores[0][0])\r\n\t\ttrace2y= score.convert_to_integers(scores[0][1])\r\n\texcept:\r\n\t\ttrace2x=[1]\r\n\t\ttrace2y=[1]\r\n\ttrace1x= score.convert_to_integers(scores[1][0])\r\n\ttrace1y= score.convert_to_integers(scores[1][1])\r\n\ttrace3x= score.convert_to_integers(simvalues[0])\r\n\ttrace3y= score.convert_to_integers(simvalues[1])\r\n\ttrace4x= score.convert_to_integers(simvalues[0])\r\n\ttrace4y= score.convert_to_integers(simvalues[2])\r\n\r\n\treturn render_template(\"hello.html\", trace1x=trace1x, trace1y=trace1y, trace2x=trace2x, trace2y=trace2y, trace3x=trace3x, trace3y=trace3y, trace4x=trace4x, trace4y=trace4y)\r\n\r\napp.run(\r\n\tdebug = True)\r\n"
},
{
"alpha_fraction": 0.5853034853935242,
"alphanum_fraction": 0.6287540197372437,
"avg_line_length": 25.913793563842773,
"blob_id": "e11d117d049375a2c2a9ac566abe7415c744c424",
"content_id": "392047bac17536b74d91309bb9e43e7a3083e9ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1565,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 58,
"path": "/BioBlobs/score.py",
"repo_name": "BDAthlon/2017-Triple_Helix-2",
"src_encoding": "UTF-8",
"text": "import random\nimport os\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport csv\nfrom numpy import genfromtxt\nimport pandas as pd\n\ndef playgametest(param1, param2):\n x = [(0,1,10,20,30,40,50),(0,10,100,150,180,200,250),(0,5,20,40,80,100,120)]\n return x\n\ndef convert_to_integers(tuple):\n newlist= list()\n for number in tuple:\n newlist.append(int(number))\n return newlist\n\ndef score():\n df_blobs=pd.read_csv('blobvalues.csv', sep=',',header=None)\n #print df_blobs.values\n\n df_sim =pd.read_csv('simvalues.csv', sep=',',header=None)\n #print df_sim.values\n\n right_timings_1 = [sublist[0] for sublist in df_blobs.values - 1]\n #right_timings - 1 so indexes\n\n diff_simblob = []\n count = 0\n for timing in right_timings_1:\n diff_simblob.append(100.0*abs(df_sim.values[timing][2] - df_blobs.values[count][1])/(df_blobs.values[count][1]))\n count = count + 1\n #print diff_simblob\n\n scoring = []#1*[None]\n for difference in diff_simblob:\n scoring.append(difference < 5)\n #print scoring\n\n #get coordinates of blob that was hit\n count = 0\n hit_blobs = []\n nothit_blobs = []\n for correct in scoring:\n if correct:\n hit_blobs.append([df_blobs[0][count],df_blobs[1][count]])\n else:\n nothit_blobs.append([df_blobs[0][count],df_blobs[1][count]])\n count = count + 1\n #print hit_blobs\n hit_blobs = zip(*hit_blobs)\n nothit_blobs = zip(*nothit_blobs)\n return hit_blobs,nothit_blobs\n\n#run the function\n#score()\n#print score()\n\n\n\n\n"
},
{
"alpha_fraction": 0.808080792427063,
"alphanum_fraction": 0.808080792427063,
"avg_line_length": 8,
"blob_id": "b9f84a1cb3cbc035f3cb9cfa83f3ee11e9ae2657",
"content_id": "d9cdd63cd63b2f903be2d3c9ee54687d3f209277",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 99,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 11,
"path": "/requirements.txt",
"repo_name": "BDAthlon/2017-Triple_Helix-2",
"src_encoding": "UTF-8",
"text": "# Web\nFlask\n\n# Numerical/plotting\nnumpy\nmatplotlib\npandas\n\n# Biological simulations\npysces\nstochpy\n"
},
{
"alpha_fraction": 0.5809795260429382,
"alphanum_fraction": 0.5957502126693726,
"avg_line_length": 30.63559341430664,
"blob_id": "6499cdce8406e52bffe10d0580634ed753cf0e7b",
"content_id": "7ba1e55237fff2abd21d53c40c06b77656b61601",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3859,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 118,
"path": "/BioBlobs/playgame.py",
"repo_name": "BDAthlon/2017-Triple_Helix-2",
"src_encoding": "UTF-8",
"text": "import os\r\ncurrentdir = os.getcwd()\r\nimport random\r\nimport pysces\r\nimport matplotlib.pyplot as plt\r\nimport stochpy\r\nimport numpy as np\r\nimport csv\r\nimport pandas as pd\r\nimport score as score\r\n\r\ndef playgame(currentdir,stochastic,parameter1,parameter2):\r\n #parameter 1 determines final level\r\n #parameter 2 determines\r\n #type should be 0 or 1 -> toggle between stochastic and deterministic\r\n if stochastic:\r\n #stochastic\r\n #change directory\r\n \r\n smod = stochpy.SSA()\r\n smod.Model(os.path.join(currentdir, 'mRNAproteinIulia.psc'))\r\n\r\n #change parameters\r\n smod.ChangeParameter(\"Ksynmrna\",parameter1)\r\n smod.ChangeParameter(\"Kdeg2\",parameter2)\r\n #smod.ChangeInitialSpeciesCopyNumber(\"Kdeg1\",parameter2)\r\n \r\n #smod.data_stochsim.simulation_endtime\r\n #smod.data_stochsim.simulation_timesteps = 51.0\r\n smod.Timesteps(50)\r\n smod.Endtime(50)\r\n smod.DoStochSim(end=50)\r\n \r\n #Only plot as a test\r\n #smod.PlotSpeciesTimeSeries()\r\n #pysces.plt.setAxisLabel('x', label='time')\r\n #pysces.plt.setAxisLabel('y', label='expression levels')\r\n #plt.savefig(currentdir+'/stoch.png')\r\n \r\n simvalues_stoch = smod.data_stochsim.getSpecies()\r\n x_values = range(5,51,5)\r\n \r\n #arrange results\r\n time_val = []\r\n mRNA_val = []\r\n protein_val =[]\r\n for list in simvalues_stoch:\r\n time_val.append(list[0])\r\n mRNA_val.append(list[1])\r\n protein_val.append(list[2])\r\n time_val_rounded = []\r\n for i in time_val:\r\n time_val_rounded.append(int(round(i)))\r\n count = 0\r\n mRNA_final = []\r\n protein_final = []\r\n for val in range(0,len(mRNA_val)-1):\r\n mRNA_final.append([mRNA_val[val]] * ((time_val_rounded[count+1] - time_val_rounded[count])))\r\n protein_final.append([protein_val[val]] * ((time_val_rounded[count+1] - time_val_rounded[count])))\r\n count = count + 1\r\n time_final = range(1,51)\r\n mRNA_final = flatten(mRNA_final)\r\n protein_final = flatten(protein_final)\r\n simvalues = 
zip(time_final,mRNA_final,protein_final)\r\n\r\n else:\r\n #deterministic\r\n print(currentdir+'pysces_determ.psc')\r\n mod = pysces.model('pysces_determ', dir=os.getcwd())\r\n #change params\r\n mod.Ksynmrna = parameter1\r\n mod.Kdeg2 = parameter2\r\n mod.doSim(end=50.0, points=50.0)\r\n mod.Simulate()\r\n #print os.getcwd(), '\\n\\n\\n'\r\n #mod.doSimPlot()\r\n \r\n #plot here to check it works but don't actually plot\r\n mod.sim_start = 1.0\r\n mod.doSimPlot(end=50.0, points=51, plot='species', fmt='lines', filename=None)\r\n #pysces.plt.p_activateInterface('matplotlib')\r\n #pysces.plt.setAxisLabel('x', label='time')\r\n #pysces.plt.setAxisLabel('y', label='expression levels')\r\n #plt.savefig(currentdir+'/simpledeterm.png')\r\n #plt.close()\r\n\r\n #get simulation values\r\n simvalues = mod.data_sim.getSpecies()\r\n# print(mod.data_sim.getSpecies())\r\n\r\n os.chdir(currentdir)\r\n with open(\"simvalues.csv\",\"wb\") as f:\r\n writer = csv.writer(f)\r\n writer.writerows(simvalues)\r\n\r\n return simvalues\r\n\r\ndef flatten(l):\r\n flat_list =[]\r\n for sublist in l:\r\n for item in sublist:\r\n flat_list.append(item)\r\n return flat_list\r\n\r\nsimvalues = playgame(currentdir, True, 50, 5)\r\n\r\n#sim_yvaluesmRNA, sim_yvaluesprotein = playgame(type)\r\n#print(sim_yvaluesmRNA, sim_yvaluesprotein,'\\n')\r\n#print simvalues\r\n\r\n\r\n#csv_file = \"simvalues.csv\"\r\n#df = pd.read_csv(csv_file)\r\n#print df(1)\r\n#included_cols = 1\r\n\r\n\r\n#mod.SimPlot(plot='species', filename=None, title=None, log=None, format='lines')\r\n\r\n\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.7501927614212036,
"alphanum_fraction": 0.7686969637870789,
"avg_line_length": 39.53125,
"blob_id": "97e4206f670b3f26c2f200f1865a39208e227942",
"content_id": "5892c8a7574e5b750bdc670f4bb23ffaf2b79a6d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1297,
"license_type": "no_license",
"max_line_length": 232,
"num_lines": 32,
"path": "/README.md",
"repo_name": "BDAthlon/2017-Triple_Helix-2",
"src_encoding": "UTF-8",
"text": "# Bio Green Globs\n\n\nHere we make parameter searching fun by creating a game where users can choose whether to simulate data using stochastic or deterministic means and vary parameters in order to create protein plots that hit certain targets, or blobs.\n\nThe game is written as a python web app with flask. To open the GUI, run the ``game.py`` file from the command line. (After you installing the requirements with ``pip install -r requirements.txt``)\n\nThe parameters are given in an form and then the new values for the graph are generated with python. The new XY coordinates are passed on to and the new graph is plotted on the fly with javascript library plotly.\n\nExamples of plot:\n\nGlob plots\n\n\n\n\nStochastic simulations\n\n\n\nDeterministic simulations\n\n\n\nThe webapp\n\n\n\n\n--- as you see it is not finished --- if only we had one more day ---\n\nThis is a project by Iulia Gherman, James Scott-Brown and Margarita Kopniczky for the 2017 DBAthlon, IWBDA, Pittsburgh.\n"
},
{
"alpha_fraction": 0.6076233386993408,
"alphanum_fraction": 0.6352765560150146,
"avg_line_length": 27.4255313873291,
"blob_id": "b7e22d50c945a16f99e7fbdcb165fd3212e247bf",
"content_id": "11bba9510d1b6e310ba27921c3d2c785411545d0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1338,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 47,
"path": "/BioBlobs/createblobs.py",
"repo_name": "BDAthlon/2017-Triple_Helix-2",
"src_encoding": "UTF-8",
"text": "import random\nimport matplotlib.pyplot as plt\nimport os\nimport csv\n\ndef test(param1, param2):\n result = param2 + param1\n print result\n return result\n\ndef createblobs(bias):\n if bias:\n on_off_bias = random.choice([True, False])\n if on_off_bias: \n how_many_on = random.randint(6,10)\n else:\n how_many_on = random.randint(1,5)\n high_value = random.randint(1,300)\n blob_values = [high_value] * how_many_on + [0] * (10-how_many_on)\n random.shuffle(blob_values)\n \n else:\n blob_values = random.sample(range(1,300),10)\n x_values = range(5,51,5)\n blob_dict = dict(zip(x_values,blob_values))\n\n blob_dict = zip(x_values,blob_values)\n #plot\n #plt.plot(x_values,blob_values,'go')\n#plt.xlabel('time')\n# plt.ylabel('expression levels')\n# plt.axis([0,55,0,max(blob_values)+5])\n# plt.savefig('plot_blobs.png')\n\n #plot blob values as csv\n currentdir = os.getcwd()\n os.chdir(currentdir)\n with open(\"blobvalues.csv\",\"wb\") as f:\n writer = csv.writer(f)\n writer.writerows(blob_dict)\n print(blob_dict)\n return blob_dict\n\nbias = 0 #bias of 0 means blobs are randomly generated within range 0,300\n# otherwise they are generated so all blobs have either a high or low value\nblob_dict = createblobs(bias)\n#print blob_dict\n\n\n"
}
] | 6 |
sheldonucr/thermal_model_control_building | https://github.com/sheldonucr/thermal_model_control_building | c3e2334b1ce83468056b31e632059e63c4c96b3b | 301903446a09707e94ee1ebce0ed8fc9ea73eb21 | 2031e391459855b43e2629620add8bf2d8cfebab | refs/heads/master | 2021-01-10T03:48:18.459522 | 2015-09-25T16:31:48 | 2015-09-25T16:31:48 | 43,160,428 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5800521969795227,
"alphanum_fraction": 0.5889951586723328,
"avg_line_length": 31.861225128173828,
"blob_id": "35cf95efea4e7435c898b525668cf9fba0511b92",
"content_id": "dda6ec7d0ba8b732520144dcc44d6c10d286d8ae",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8051,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 245,
"path": "/code/rnn-method/NLSSVR.py",
"repo_name": "sheldonucr/thermal_model_control_building",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport json\nimport sys\nimport os\n\nimport numpy as NP\nimport numpy.random as RND\nimport pylab as PL\n\nimport MyUtils as MU\n\nfrom scipy import sin, rand, arange\n\nfrom pybrain.structure import FullConnection, RecurrentNetwork, IdentityConnection\nfrom pybrain.structure.modules import SigmoidLayer, LinearLayer, TanhLayer, BiasUnit\nfrom pybrain.datasets import SequentialDataSet\nfrom pybrain.supervised import RPropMinusTrainer, BackpropTrainer\nfrom pybrain.tools.validation import testOnSequenceData, ModuleValidator\nfrom pybrain.tools.shortcuts import buildNetwork\nfrom pybrain.tools.xml.networkwriter import NetworkWriter\n\ndef main():\n config = MU.ConfigReader('configs/%s' % sys.argv[1])\n config.read()\n\n logDir = '%s-%s' % (__file__, sys.argv[1])\n os.mkdir(logDir)\n\n with open('%s/config.txt' % logDir, 'w') as outfile:\n json.dump(config.getConfigDict(), outfile, indent=4)\n\n dr = MU.DataReader(config['input_tsv_path'])\n data = dr.read(config['interested_columns'])\n\n inLabels = config['input_columns']\n\n outLabels = config['output_columns']\n\n tds, vds = seqDataSetPair(data, inLabels, outLabels, config['seq_label_column'],\n config['test_seqno'], config['validation_seqno'])\n\n inScale = config.getDataScale(inLabels)\n outScale = config.getDataScale(outLabels)\n\n normalizeDataSet(tds, ins = inScale, outs = outScale)\n normalizeDataSet(vds, ins = inScale, outs = outScale)\n\n trainData = tds\n validationData = vds\n\n fdim = tds.indim / 2 + 15\n xdim = tds.outdim * 2\n\n rnn = buildNetwork(tds.indim,\n fdim, fdim, xdim,\n tds.outdim,\n hiddenclass=SigmoidLayer,\n recurrent=True)\n\n rnn.addRecurrentConnection(FullConnection(rnn['hidden2'], rnn['hidden0']))\n rnn.sortModules()\n\n trainer = RPropMinusTrainer(rnn, dataset=trainData, batchlearning=True, verbose=True, weightdecay=0.005)\n #trainer = RPropMinusTrainer(rnn, dataset=trainData, batchlearning=True, verbose=True)\n #trainer = BackpropTrainer(rnn, 
dataset=trainData, learningrate=0.0001,\n # lrdecay=1.0, momentum=0.4, verbose=True, batchlearning=False,\n # weightdecay=0)\n\n errTime = []\n errTrain = []\n errValidation = []\n epochNo = 0\n while True:\n\n for i in range(config['epochs_per_update']):\n trainer.train()\n\n epochNo += config['epochs_per_update']\n NetworkWriter.writeToFile(rnn, '%s/Epoch_%d.xml' % (logDir, epochNo))\n NetworkWriter.writeToFile(rnn, '%s/Latest.xml' % logDir)\n\n tOut = ModuleValidator.calculateModuleOutput(rnn, trainData)\n vOut = ModuleValidator.calculateModuleOutput(rnn, validationData)\n\n tScaler = config.getDataScale([config['output_scalar_label']])[0][1]\n tAvgErr = NP.sqrt(NP.mean((trainData['target'] - tOut) ** 2)) * tScaler\n vAvgErr = NP.sqrt(NP.mean((validationData['target'] - vOut) ** 2)) * tScaler\n\n tMaxErr = NP.max(NP.abs(trainData['target'] - tOut)) * tScaler\n vMaxErr = NP.max(NP.abs(validationData['target'] - vOut)) * tScaler\n\n errTrain.append(tAvgErr)\n errValidation.append(vAvgErr)\n errTime.append(epochNo)\n\n print \"Training error: avg %5.3f degC max %5.3f degC\" % (tAvgErr, tMaxErr)\n print \"Validation error: avg %5.3f degC max %5.3f degC\" % (vAvgErr, vMaxErr)\n print \"------------------------------------------------------------------------------\"\n\n if (config['visualize_on_training'] == 'yes'):\n\n PL.figure(1)\n PL.ioff()\n visulizeDataSet(rnn, trainData, 0,\n config['visualized_columns']['input'],\n config['visualized_columns']['output'])\n PL.ion()\n PL.draw()\n\n PL.figure(2)\n PL.ioff()\n visulizeDataSet(rnn, validationData, 0,\n config['visualized_columns']['input'],\n config['visualized_columns']['output'])\n PL.ion()\n PL.draw()\n\n p = PL.figure(3)\n PL.ioff()\n p.clear()\n PL.plot(errTime, errTrain, label = 'Train')\n PL.plot(errTime, errValidation, label = 'Validation')\n PL.legend()\n PL.ion()\n PL.draw()\n\ndef addSubNet(nn, prefix, indim, xdim, outdim):\n\n np = prefix + '_'\n\n nn.addModule(LinearLayer(indim, name=np+'in'))\n 
nn.addModule(LinearLayer(outdim, name=np+'out'))\n\n nn.addModule(SigmoidLayer(indim + xdim, name=np+'f0'))\n nn.addModule(SigmoidLayer(indim + xdim, name=np+'f1'))\n nn.addModule(SigmoidLayer(indim + xdim, name=np+'f2'))\n nn.addModule(SigmoidLayer(indim + xdim, name=np+'f3'))\n nn.addModule(SigmoidLayer(xdim, name=np+'x'))\n\n nn.addConnection(FullConnection(nn[np+'in'], nn[np+'f0'], outSliceTo=indim))\n\n nn.addConnection(FullConnection(nn[np+'f0'], nn[np+'f1'], name=np+'f0~f1'))\n nn.addConnection(FullConnection(nn[np+'f1'], nn[np+'f2'], name=np+'f1~f2'))\n nn.addConnection(FullConnection(nn[np+'f2'], nn[np+'f3'], name=np+'f2~f3'))\n nn.addConnection(FullConnection(nn[np+'f3'], nn[np+'x'], name=np+'f3~x'))\n\n nn.addRecurrentConnection(FullConnection(nn[np+'x'], nn[np+'f0'], outSliceFrom=indim))\n\n for i in range(outdim):\n nn.addConnection(FullConnection(nn[np+'x'], nn[np+'out'], inSliceFrom=i, inSliceTo=i+1, outSliceFrom=i, outSliceTo=i+1))\n nn.addConnection(FullConnection(nn['b'], nn[np+'out']))\n\ndef normalizeDataSet(data, ins = None, outs = None):\n inscale = []\n outscale = []\n\n for i in range(data.indim):\n if ins == None:\n mu = NP.mean(data['input'][:, i])\n sigma = NP.std(data['input'][:, i])\n else:\n mu, sigma = ins[i]\n\n data['input'][:, i] -= mu\n data['input'][:, i] /= sigma\n\n inscale.append((mu, sigma))\n\n for i in range(data.outdim):\n\n if outs == None:\n\n maxPossible = NP.max(data['target'][:, i])\n minPossible = NP.min(data['target'][:, i])\n mu = minPossible\n sigma = maxPossible - minPossible\n else:\n mu, sigma = outs[i]\n\n data['target'][:, i] -= mu\n data['target'][:, i] /= sigma\n\n outscale.append((mu, sigma))\n\n return (inscale, outscale)\n\ndef visulizeDataSet(network, data, seqno, in_labels, out_labels):\n\n seq = data.getSequence(seqno)\n tmpDs = SequentialDataSet(data.indim, data.outdim)\n tmpDs.newSequence()\n\n for i in xrange(data.getSequenceLength(seqno)):\n tmpDs.addSample(seq[0][i], seq[1][i])\n\n nplots = 
len(in_labels) + len(out_labels)\n\n for i in range(len(in_labels)):\n p = PL.subplot(nplots, 1, i + 1)\n p.clear()\n p.plot(tmpDs['input'][:, i])\n p.set_ylabel(in_labels[i])\n\n for i in range(len(out_labels)):\n p = PL.subplot(nplots, 1, i + 1 + len(in_labels))\n p.clear()\n\n output = ModuleValidator.calculateModuleOutput(network, tmpDs)\n\n p.plot(tmpDs['target'][:, i], label='train')\n p.plot(output[:, i], label='sim')\n\n p.legend()\n p.set_ylabel(out_labels[i])\n\ndef seqDataSetPair(data, in_labels, out_labels, seq_title, tseqs, vseqs):\n\n tds = SequentialDataSet(len(in_labels), len(out_labels))\n vds = SequentialDataSet(len(in_labels), len(out_labels))\n ds = None\n\n for i in xrange(len(data[in_labels[0]])):\n\n if i == 0 or data[seq_title][i] != data[seq_title][i - 1]:\n if int(data[seq_title][i]) in tseqs:\n ds = tds\n ds.newSequence()\n elif int(data[seq_title][i]) in vseqs:\n ds = vds\n ds.newSequence()\n else:\n ds = None\n\n if ds == None: continue\n\n din = [data[l][i] for l in in_labels]\n dout = [data[l][i] for l in out_labels]\n\n ds.addSample(din, dout)\n\n return (tds, vds)\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7702702879905701,
"alphanum_fraction": 0.7702702879905701,
"avg_line_length": 48.33333206176758,
"blob_id": "4e6b6949d9640d295fc5cf380e72c99fcfc9aff1",
"content_id": "f72bd28c223ff47f1cdbe4c5c1f8c2e7353c4f97",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 148,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 3,
"path": "/README.md",
"repo_name": "sheldonucr/thermal_model_control_building",
"src_encoding": "UTF-8",
"text": "# EnergyPlus Modeling Repository\n\nIn this repository, we can find the code for our work under ./code/, the related documents are located at ./docs/\n"
},
{
"alpha_fraction": 0.6473429799079895,
"alphanum_fraction": 0.6618357300758362,
"avg_line_length": 12.354838371276855,
"blob_id": "fdea1d27403c0ee688a05d88ff49bdb7a52c78e5",
"content_id": "da4f355ca647bee79e6151e1af9ccdb3d40e4648",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 414,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 31,
"path": "/script/run.sh",
"repo_name": "sheldonucr/thermal_model_control_building",
"src_encoding": "UTF-8",
"text": "#/bin/bash\n\n#tmp folder - $dir/tmp\n#input folder - $dir/input\n\n#input parameters\ninputfile='test.txt'\n\ndir=$PWD\n\nif [ -f $dir/sample/$inputfile ] \nthen\n\techo 'File found';\nelse\n\techo 'File Not Found - Exiting program';\n\texit;\nfi\n\n#clean tmp folder\nrm -rf $dir/tmp\nmkdir -p tmp\n\n#test\ncd $dir/tmp\nfor i in {1..100}\ndo\n\tm4 -DFILE_NUMBER=1.${i} $dir/sample/test.txt > ${i}_${inputfile}\ndone\ncd ..\n\necho 'Script Done'\n"
},
{
"alpha_fraction": 0.4959128201007843,
"alphanum_fraction": 0.5104450583457947,
"avg_line_length": 26.172840118408203,
"blob_id": "fc438df614b9261a06d306d5c83f979e49e7bf0c",
"content_id": "79aa0dd272b32435d3051ee5f58f316542e79430",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2202,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 81,
"path": "/code/rnn-method/MyUtils.py",
"repo_name": "sheldonucr/thermal_model_control_building",
"src_encoding": "UTF-8",
"text": "import pylab\nimport scipy\nimport json\n\nclass DataReader:\n def __init__(self, fileName):\n self._fileName = fileName\n\n def read(self, keys):\n data = {}\n\n with open(self._fileName) as f:\n\n # title line\n line = f.readline()\n\n titles = map(lambda s: s.strip(), line.split('\\t'))\n keyMap = {}\n for k in keys:\n keyMap[k] = titles.index(k)\n data[k] = []\n\n # data section\n while True:\n line = f.readline()\n if line == '': break;\n elements = map(lambda s: s.strip(), line.split('\\t'))\n\n for k, idx in keyMap.iteritems():\n data[k].append(elements[idx])\n return data\n pass\n\nclass ConfigReader:\n def __init__(self, fileName):\n self._fileName = fileName\n self._data = None\n pass\n\n def read(self):\n if self._data == None:\n with open(self._fileName) as data_file:\n self._data = json.load(data_file)\n return self._data\n\n def getConfigDict(self):\n return self.read()\n\n def __getitem__(self, k):\n db = self.read()\n return db[k]\n\n def getDataScale(self, labels):\n db = self.read()['data_scales']\n return [db[i] for i in labels]\n\nclass DataPlotter:\n def __init__(self): pass\n\n def plotNoHVACFromFile(self, fileName, nDays = 14):\n dr = DataReader(fileName)\n data = dr.read(['outdoor', 'space1', 'space2', 'space3', 'space4', 'space5', 'month', 'day', 'hour'])\n\n self.plotNoHVAC(data, nDays)\n\n def plotNoHVAC(self, data, nDays = 14):\n\n pylab.figure(1)\n plt = pylab.subplot(6,1,1)\n plt.vlines(range(24 * nDays), scipy.arange(24 * nDays) * 0, data['outdoor'][0:24*nDays])\n plt.plot(data['outdoor'][0:24*nDays])\n plt.set_xlabel('time (s)')\n plt.set_ylabel('Outdoor (C)')\n\n for i in scipy.arange(5) + 1:\n plt = pylab.subplot(6, 1, i + 1)\n plt.plot(data['space' + str(i)][0:24*nDays])\n plt.set_xlabel('time (s)')\n plt.set_ylabel('Space #%d (C)' % i)\n\n pylab.show()\n\n"
},
{
"alpha_fraction": 0.8045976758003235,
"alphanum_fraction": 0.8045976758003235,
"avg_line_length": 28,
"blob_id": "de6078e1cbf800db53d7390aff50ff405c52b0ae",
"content_id": "47145e48dcaa6a13d3be6cd549639a872d1b5684",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 87,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 3,
"path": "/script/README.md",
"repo_name": "sheldonucr/thermal_model_control_building",
"src_encoding": "UTF-8",
"text": "# Scripts\n\nThis subdirectory contains work done using the US. DOE EnergyPlus software.\n"
},
{
"alpha_fraction": 0.5248676538467407,
"alphanum_fraction": 0.5310444235801697,
"avg_line_length": 34.82183837890625,
"blob_id": "1c65c3f10be368345257bc3ba4ae2b214176f90d",
"content_id": "722971338f47a48c1839cb8d2768407a7152633e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12466,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 348,
"path": "/code/rnn-method/RecoverNN.py",
"repo_name": "sheldonucr/thermal_model_control_building",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport json\nimport sys\nimport os\nimport re\n\nimport numpy as NP\nimport numpy.random as RND\nimport pylab as PL\n\nimport MyUtils as MU\n\nfrom scipy import sin, rand, arange\n\nfrom pybrain.structure.modules import LSTMLayer, SoftmaxLayer, SigmoidLayer\nfrom pybrain.datasets import SequentialDataSet\nfrom pybrain.supervised import RPropMinusTrainer, BackpropTrainer\nfrom pybrain.tools.validation import testOnSequenceData, ModuleValidator\nfrom pybrain.tools.shortcuts import buildNetwork\nfrom pybrain.tools.xml.networkwriter import NetworkWriter\nfrom pybrain.tools.xml.networkreader import NetworkReader\n\ndef main():\n logDir = sys.argv[1]\n config = MU.ConfigReader('%s/%s' % (logDir, 'config.txt'))\n config.read()\n\n dr = MU.DataReader(config['input_tsv_path'])\n data = dr.read(config['interested_columns'])\n\n inLabels = config['input_columns']\n\n outLabels = config['output_columns']\n\n tds, vds = seqDataSetPair(data, inLabels, outLabels,\n config['seq_label_column'], config['test_seqno'],\n config['validation_seqno'])\n\n inScale = config.getDataScale(inLabels)\n outScale = config.getDataScale(outLabels)\n\n normalizeDataSet(tds, ins = inScale, outs = outScale)\n normalizeDataSet(vds, ins = inScale, outs = outScale)\n\n trainData = tds\n validationData = vds\n\n rnn = NetworkReader.readFrom('%s/%s' % (logDir, 'Latest.xml'))\n\n tOut = ModuleValidator.calculateModuleOutput(rnn, trainData)\n vOut = ModuleValidator.calculateModuleOutput(rnn, validationData)\n\n tScaler = config.getDataScale([config['output_scalar_label']])[0][1]\n tAvgErr = NP.sqrt(NP.mean((trainData['target'] - tOut) ** 2)) * tScaler\n vAvgErr = NP.sqrt(NP.mean((validationData['target'] - vOut) ** 2)) * tScaler\n\n tMaxErr = NP.max(NP.abs(trainData['target'] - tOut)) * tScaler\n vMaxErr = NP.max(NP.abs(validationData['target'] - vOut)) * tScaler\n\n print \"Training error: avg %5.3f degC max %5.3f degC\" % (tAvgErr, tMaxErr)\n print \"Validation error: avg 
%5.3f degC max %5.3f degC\" % (vAvgErr, vMaxErr)\n\n if len(sys.argv) == 3:\n filename = sys.argv[2]\n outbuf = []\n\n column_titles = [config['seq_label_column'], \"training\", \"validation\"]\n column_titles += config['input_columns']\n column_titles += map(lambda s: \"%s_actual\" % s, config['output_columns'])\n column_titles += map(lambda s: \"%s_simulated\" % s, config['output_columns'])\n\n outbuf.append(reduce(lambda lhs, rhs: \"%s\\t%s\" % (lhs, rhs), column_titles))\n\n for data in [tds, vds]:\n print \"Generating %s set curves...\" % (\"training\" if data==tds else \"validation\")\n for seqno in xrange(data.getNumSequences()):\n print \" %s sequence #%d...\" % ((\"Training\" if data==tds else \"Validation\"), seqno)\n seq = data.getSequence(seqno)\n tmpDs = SequentialDataSet(data.indim, data.outdim)\n tmpDs.newSequence()\n for i in xrange(data.getSequenceLength(seqno)):\n tmpDs.addSample(seq[0][i], seq[1][i])\n\n output = ModuleValidator.calculateModuleOutput(rnn, tmpDs)\n\n # denormalize\n for i in range(len(config['input_columns'])):\n tmpDs['input'][:, i] *= inScale[i][1]\n tmpDs['input'][:, i] += inScale[i][0]\n for i in range(len(config['output_columns'])):\n tmpDs['target'][:, i] *= outScale[i][1]\n tmpDs['target'][:, i] += outScale[i][0]\n\n output[:, i] *= outScale[i][1]\n output[:, i] += outScale[i][0]\n\n for i in xrange(data.getSequenceLength(seqno)):\n\n line = []\n line += [seqno, 1 if data==tds else 0, 1 if data==vds else 0]\n line += tmpDs.getSample(i)[0].tolist()\n line += tmpDs.getSample(i)[1].tolist()\n line += output[i].tolist()\n outbuf.append(reduce(lambda lhs, rhs: \"%s\\t%s\" % (lhs, rhs), line))\n\n pass # for\n pass # for\n\n print \"Writing results into file '%s'...\" % filename\n with open(filename, \"w\") as f:\n for line in outbuf:\n print >> f, line\n return\n\n while True:\n s = raw_input(\"\"\"\nPlease type what you want to do:\n <N> ------------ Plot the N-th normalized curves in input data set\n <{T|V} N> ------ Plot the 
N-th normalized curves in Train|Validation set\n <W filename> --- Write the output to file 'filename'\n\"\"\")\n s = s.strip()\n if s == '': continue\n\n m = re.match('(\\d+)', s)\n if m:\n seqno = int(m.group(1))\n mIndex = None\n mData = None\n mString = None\n if seqno in config['test_seqno']:\n mIndex = config['test_seqno'].index(seqno)\n mData = trainData\n mString = \"training\"\n elif seqno in config['validation_seqno']:\n mIndex = config['validation_seqno'].index(seqno)\n mData = validationData\n mString = \"validation\"\n else:\n print \"Sequence #%d is not available.\" % seqno\n continue\n\n print \"Visualizing Sequence #%d (%s data)...\" % (seqno, mString)\n\n PL.figure(1)\n visulizeDataSet(rnn, mData, mIndex,\n config['visualized_columns']['input'],\n outLabels, inLabels, outLabels)\n PL.suptitle(\"Sequence #%d (%s)\" % (seqno, mString))\n PL.show()\n continue\n\n m = re.match('([TVtv])(\\d+)', s)\n if m:\n\n dsString = None\n dsData = None\n dsSeqNO = int(m.group(2))\n dsRealSeq = None\n\n if m.group(1).upper() == 'T':\n dsString = 'vraining'\n dsData = trainData\n dsRealSeq = config['test_seqno'][dsSeqNO]\n\n else:\n dsString = 'validation'\n dsData = validationData\n dsRealSeq = config['test_seqno'][dsSeqNO]\n\n if not dsSeqNO in range(dsData.getNumSequences()):\n print \"Sequence #%d is not available.\" % dsSeqNO\n continue\n\n print \"Visualizing %s sequence #%d (realseq #%d)...\" % (dsString, dsSeqNO, dsRealSeq)\n PL.figure(1)\n visulizeDataSet(rnn, dsData, dsSeqNO,\n config['visualized_columns']['input'], outLabels, inLabels,\n outLabels)\n PL.suptitle(\"Sequence #%d in %s dataset (realseq #%d)\" % (dsSeqNO, dsString, dsRealSeq))\n visulizeAll5Inputs(rnn, dsData, dsSeqNO, inLabels)\n PL.show()\n\n continue\n\n m = re.match('[Ww]\\s+(\\S+)', s)\n if m:\n filename = \"%s.tsv\" % m.group(1)\n outbuf = []\n\n column_titles = [config['seq_label_column'], \"training\", \"validation\"]\n column_titles += config['input_columns']\n column_titles += 
map(lambda s: \"%s_actual\" % s, config['output_columns'])\n column_titles += map(lambda s: \"%s_simulated\" % s, config['output_columns'])\n\n outbuf.append(reduce(lambda lhs, rhs: \"%s\\t%s\" % (lhs, rhs), column_titles))\n\n for data in [tds, vds]:\n print \"Generating %s set curves...\" % (\"training\" if data==tds else \"validation\")\n for seqno in xrange(data.getNumSequences()):\n print \" %s sequence #%d...\" % ((\"Training\" if data==tds else \"Validation\"), seqno)\n seq = data.getSequence(seqno)\n tmpDs = SequentialDataSet(data.indim, data.outdim)\n tmpDs.newSequence()\n for i in xrange(data.getSequenceLength(seqno)):\n tmpDs.addSample(seq[0][i], seq[1][i])\n\n output = ModuleValidator.calculateModuleOutput(rnn, tmpDs)\n\n # denormalize\n for i in range(len(config['input_columns'])):\n tmpDs['input'][:, i] *= inScale[i][1]\n tmpDs['input'][:, i] += inScale[i][0]\n for i in range(len(config['output_columns'])):\n tmpDs['target'][:, i] *= outScale[i][1]\n tmpDs['target'][:, i] += outScale[i][0]\n\n output[:, i] *= outScale[i][1]\n output[:, i] += outScale[i][0]\n\n for i in xrange(data.getSequenceLength(seqno)):\n\n line = []\n line += [seqno, 1 if data==tds else 0, 1 if data==vds else 0]\n line += tmpDs.getSample(i)[0].tolist()\n line += tmpDs.getSample(i)[1].tolist()\n line += output[i].tolist()\n outbuf.append(reduce(lambda lhs, rhs: \"%s\\t%s\" % (lhs, rhs), line))\n\n pass # for\n pass # for\n\n print \"Writing results into file '%s'...\" % filename\n with open(filename, \"w\") as f:\n for line in outbuf:\n print >> f, line\n continue # file exporting\n\n print \"Invalid input. 
Examples: 3, 9, T10, V8, t11, v1, w output\"\n\ndef normalizeDataSet(data, ins = None, outs = None):\n inscale = []\n outscale = []\n\n for i in range(data.indim):\n if ins == None:\n mu = NP.mean(data['input'][:, i])\n sigma = NP.std(data['input'][:, i])\n else:\n mu, sigma = ins[i]\n\n data['input'][:, i] -= mu\n data['input'][:, i] /= sigma\n\n inscale.append((mu, sigma))\n\n for i in range(data.outdim):\n\n if outs == None:\n\n maxPossible = NP.max(data['target'][:, i])\n minPossible = NP.min(data['target'][:, i])\n mu = minPossible\n sigma = maxPossible - minPossible\n else:\n mu, sigma = outs[i]\n\n data['target'][:, i] -= mu\n data['target'][:, i] /= sigma\n\n outscale.append((mu, sigma))\n\n return (inscale, outscale)\n\ndef visulizeAll5Inputs(network, data, seqno, labels):\n figNO = 100\n for i in [1,2,3,4,5]:\n PL.figure(figNO)\n visulizeInput(network, data, seqno, labels, \"_%d\" % i)\n figNO += 1\n\ndef visulizeInput(network, data, seqno, labels, keyword):\n inLabels = []\n for l in labels:\n if re.search(keyword, l):\n inLabels.append(l)\n\n visulizeDataSet(network, data, seqno, inLabels, [], labels, [])\n\ndef visulizeDataSet(network, data, seqno, in_labels, out_labels, in_pool, out_pool):\n\n seq = data.getSequence(seqno)\n tmpDs = SequentialDataSet(data.indim, data.outdim)\n tmpDs.newSequence()\n\n for i in xrange(data.getSequenceLength(seqno)):\n tmpDs.addSample(seq[0][i], seq[1][i])\n\n nplots = len(in_labels) + len(out_labels)\n\n for i in range(len(in_labels)):\n p = PL.subplot(nplots, 1, i + 1)\n p.clear()\n p.plot(tmpDs['input'][:, in_pool.index(in_labels[i])])\n p.set_ylabel(in_labels[i])\n\n for i in range(len(out_labels)):\n p = PL.subplot(nplots, 1, i + 1 + len(in_labels))\n p.clear()\n\n output = ModuleValidator.calculateModuleOutput(network, tmpDs)\n\n p.plot(tmpDs['target'][:, out_pool.index(out_labels[i])], label='train')\n p.plot(output[:, out_pool.index(out_labels[i])], label='sim')\n\n p.legend()\n p.set_ylabel(out_labels[i])\n\ndef 
seqDataSetPair(data, in_labels, out_labels, seq_title, tseqs, vseqs):\n\n tds = SequentialDataSet(len(in_labels), len(out_labels))\n vds = SequentialDataSet(len(in_labels), len(out_labels))\n ds = None\n\n for i in xrange(len(data[in_labels[0]])):\n\n if i == 0 or data[seq_title][i] != data[seq_title][i - 1]:\n if int(data[seq_title][i]) in tseqs:\n ds = tds\n ds.newSequence()\n elif int(data[seq_title][i]) in vseqs:\n ds = vds\n ds.newSequence()\n else:\n ds = None\n\n if ds == None: continue\n\n din = [data[l][i] for l in in_labels]\n dout = [data[l][i] for l in out_labels]\n\n ds.addSample(din, dout)\n\n return (tds, vds)\n\nif __name__ == '__main__':\n main()\n"
}
] | 6 |
phoeinx/EvaP | https://github.com/phoeinx/EvaP | ce2bdf00350029075b14fdc8dad889f0b88c2714 | a52fa1d30e17c9f21631a44d0cdabdf91759bc57 | 986e634da4d70b665889bf7f98f99b3df99dbdb7 | refs/heads/master | 2021-05-13T13:17:47.735349 | 2018-05-09T07:24:14 | 2018-05-09T07:24:14 | 116,702,095 | 0 | 0 | null | 2018-01-08T16:41:35 | 2018-01-04T21:05:10 | 2018-01-07T12:22:22 | null | [
{
"alpha_fraction": 0.7174721360206604,
"alphanum_fraction": 0.7362781763076782,
"avg_line_length": 56.515724182128906,
"blob_id": "4a332de7b1f019afdc798223e0880f96ef0cbaef",
"content_id": "a6ceb3cd8a9f785bc15543a4225a5796291d4e1d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9146,
"license_type": "permissive",
"max_line_length": 158,
"num_lines": 159,
"path": "/evap/results/tests/test_tools.py",
"repo_name": "phoeinx/EvaP",
"src_encoding": "UTF-8",
"text": "\nfrom django.test.testcases import TestCase\nfrom django.core.cache import caches\nfrom django.conf import settings\nfrom django.test import override_settings\n\nfrom model_mommy import mommy\n\nfrom evap.evaluation.models import Contribution, RatingAnswerCounter, Questionnaire, Question, Course, UserProfile\nfrom evap.results.tools import get_answers, get_answers_from_answer_counters, get_results_cache_key, calculate_average_grades_and_deviation, calculate_results\nfrom evap.staff.tools import merge_users\n\n\nclass TestCalculateResults(TestCase):\n def test_caches_published_course(self):\n course = mommy.make(Course, state='published')\n\n self.assertIsNone(caches['results'].get(get_results_cache_key(course)))\n\n calculate_results(course)\n\n self.assertIsNotNone(caches['results'].get(get_results_cache_key(course)))\n\n def test_cache_unpublished_course(self):\n course = mommy.make(Course, state='published')\n calculate_results(course)\n course.unpublish()\n\n self.assertIsNone(caches['results'].get(get_results_cache_key(course)))\n\n def test_calculation_results(self):\n contributor1 = mommy.make(UserProfile)\n student = mommy.make(UserProfile)\n\n course = mommy.make(Course, state='published', participants=[student, contributor1])\n questionnaire = mommy.make(Questionnaire)\n question = mommy.make(Question, questionnaire=questionnaire, type=\"G\")\n contribution1 = mommy.make(Contribution, contributor=contributor1, course=course, questionnaires=[questionnaire])\n\n mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=1, count=5)\n mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=2, count=15)\n mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=3, count=40)\n mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=4, count=60)\n mommy.make(RatingAnswerCounter, question=question, contribution=contribution1, answer=5, 
count=30)\n\n results = calculate_results(course)\n\n self.assertEqual(len(results), 1)\n self.assertEqual(len(results[0].results), 1)\n result = results[0].results[0]\n\n self.assertEqual(result.total_count, 150)\n self.assertAlmostEqual(result.average, float(109) / 30)\n self.assertAlmostEqual(result.deviation, 1.015983376941878)\n\n def test_calculate_results_after_user_merge(self):\n \"\"\" Asserts that merge_users leaves the results cache in a consistent state. Regression test for #907 \"\"\"\n contributor = mommy.make(UserProfile)\n main_user = mommy.make(UserProfile)\n student = mommy.make(UserProfile)\n\n course = mommy.make(Course, state='published', participants=[student])\n questionnaire = mommy.make(Questionnaire)\n mommy.make(Question, questionnaire=questionnaire, type=\"G\")\n mommy.make(Contribution, contributor=contributor, course=course, questionnaires=[questionnaire])\n\n calculate_results(course)\n\n merge_users(main_user, contributor)\n\n results = calculate_results(course)\n\n for section in results:\n self.assertTrue(Contribution.objects.filter(course=course, contributor=section.contributor).exists())\n\n def test_answer_counting(self):\n contributor1 = mommy.make(UserProfile)\n contributor2 = mommy.make(UserProfile)\n student = mommy.make(UserProfile)\n\n course1 = mommy.make(Course, state='published', participants=[student, contributor1])\n questionnaire = mommy.make(Questionnaire)\n question1 = mommy.make(Question, questionnaire=questionnaire, type=\"G\")\n question2 = mommy.make(Question, questionnaire=questionnaire, type=\"G\")\n contribution1 = mommy.make(Contribution, contributor=contributor1, course=course1, questionnaires=[questionnaire])\n contribution2 = mommy.make(Contribution, contributor=contributor1, questionnaires=[questionnaire])\n contribution3 = mommy.make(Contribution, contributor=contributor2, course=course1, questionnaires=[questionnaire])\n\n rating_answer_counters = []\n 
rating_answer_counters.append(mommy.make(RatingAnswerCounter, question=question1, contribution=contribution1, answer=1, count=1))\n rating_answer_counters.append(mommy.make(RatingAnswerCounter, question=question1, contribution=contribution1, answer=3, count=4))\n rating_answer_counters.append(mommy.make(RatingAnswerCounter, question=question1, contribution=contribution1, answer=4, count=2))\n rating_answer_counters.append(mommy.make(RatingAnswerCounter, question=question1, contribution=contribution1, answer=5, count=3))\n\n # create some unrelated answer counters for different questions / contributions\n mommy.make(RatingAnswerCounter, question=question1, contribution=contribution2, answer=1, count=1)\n mommy.make(RatingAnswerCounter, question=question1, contribution=contribution3, answer=1, count=1)\n mommy.make(RatingAnswerCounter, question=question2, contribution=contribution1, answer=1, count=1)\n\n answer_counters = get_answers(contribution1, question1)\n self.assertSetEqual(set(rating_answer_counters), set(answer_counters))\n\n answers = get_answers_from_answer_counters(answer_counters)\n self.assertListEqual(answers, [1, 3, 3, 3, 3, 4, 4, 5, 5, 5])\n\n @override_settings(CONTRIBUTION_PERCENTAGE=0.3, GRADE_PERCENTAGE=0.6)\n def test_average_grades(self):\n contributor1 = mommy.make(UserProfile)\n contributor2 = mommy.make(UserProfile)\n\n course = mommy.make(Course)\n questionnaire = mommy.make(Questionnaire)\n question_grade = mommy.make(Question, questionnaire=questionnaire, type=\"G\")\n question_likert = mommy.make(Question, questionnaire=questionnaire, type=\"L\")\n general_contribution = mommy.make(Contribution, contributor=None, course=course, questionnaires=[questionnaire])\n contribution1 = mommy.make(Contribution, contributor=contributor1, course=course, questionnaires=[questionnaire])\n contribution2 = mommy.make(Contribution, contributor=contributor2, course=course, questionnaires=[questionnaire])\n\n mommy.make(RatingAnswerCounter, 
question=question_grade, contribution=contribution1, answer=1, count=1)\n mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution2, answer=4, count=2)\n mommy.make(RatingAnswerCounter, question=question_likert, contribution=contribution1, answer=3, count=4)\n mommy.make(RatingAnswerCounter, question=question_likert, contribution=general_contribution, answer=5, count=3)\n\n total_likert = settings.CONTRIBUTION_PERCENTAGE * 3 + (1 - settings.CONTRIBUTION_PERCENTAGE) * 5\n total_grade = 2.5\n total = settings.GRADE_PERCENTAGE * total_grade + (1 - settings.GRADE_PERCENTAGE) * total_likert\n\n average, deviation = calculate_average_grades_and_deviation(course)\n\n self.assertAlmostEqual(average, total)\n self.assertAlmostEqual(deviation, 0)\n\n @override_settings(CONTRIBUTION_PERCENTAGE=0.3, GRADE_PERCENTAGE=0.6)\n def test_average_deviation(self):\n contributor1 = mommy.make(UserProfile)\n contributor2 = mommy.make(UserProfile)\n\n course = mommy.make(Course)\n questionnaire = mommy.make(Questionnaire)\n question_grade = mommy.make(Question, questionnaire=questionnaire, type=\"G\")\n question_likert = mommy.make(Question, questionnaire=questionnaire, type=\"L\")\n general_contribution = mommy.make(Contribution, contributor=None, course=course, questionnaires=[questionnaire])\n contribution1 = mommy.make(Contribution, contributor=contributor1, course=course, questionnaires=[questionnaire])\n contribution2 = mommy.make(Contribution, contributor=contributor2, course=course, questionnaires=[questionnaire])\n\n mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution1, answer=1, count=1)\n mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution1, answer=3, count=1)\n mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution2, answer=4, count=2)\n mommy.make(RatingAnswerCounter, question=question_grade, contribution=contribution2, answer=2, count=2)\n 
mommy.make(RatingAnswerCounter, question=question_likert, contribution=contribution1, answer=3, count=4)\n mommy.make(RatingAnswerCounter, question=question_likert, contribution=contribution1, answer=5, count=4)\n mommy.make(RatingAnswerCounter, question=question_likert, contribution=general_contribution, answer=5, count=3)\n\n __, deviation = calculate_average_grades_and_deviation(course)\n\n total_likert_dev = settings.CONTRIBUTION_PERCENTAGE * 1 + (1 - settings.CONTRIBUTION_PERCENTAGE) * 0\n total_grade_dev = 1\n total_dev = settings.GRADE_PERCENTAGE * total_grade_dev + (1 - settings.GRADE_PERCENTAGE) * total_likert_dev\n\n self.assertAlmostEqual(deviation, total_dev)\n"
},
{
"alpha_fraction": 0.66901034116745,
"alphanum_fraction": 0.6713982224464417,
"avg_line_length": 41.111732482910156,
"blob_id": "35f0c4de473c2988fdc4df930faeda7a134488df",
"content_id": "46e8b130ad34482e5eed4061a7fe8c2feadb6e8f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7538,
"license_type": "permissive",
"max_line_length": 145,
"num_lines": 179,
"path": "/evap/results/views.py",
"repo_name": "phoeinx/EvaP",
"src_encoding": "UTF-8",
"text": "from collections import OrderedDict, namedtuple\n\nfrom django.core.exceptions import PermissionDenied\nfrom django.shortcuts import get_object_or_404, render\nfrom django.contrib.auth.decorators import login_required\n\nfrom evap.evaluation.models import Semester, Degree, Contribution\nfrom evap.evaluation.auth import internal_required\nfrom evap.results.tools import calculate_results, calculate_average_grades_and_deviation, TextResult, RatingResult, \\\n HeadingResult, COMMENT_STATES_REQUIRED_FOR_VISIBILITY, YesNoResult\n\n\n@internal_required\ndef index(request):\n semesters = Semester.get_all_with_published_courses()\n\n return render(request, \"results_index.html\", dict(semesters=semesters))\n\n\n@internal_required\ndef semester_detail(request, semester_id):\n semester = get_object_or_404(Semester, id=semester_id)\n\n visible_states = ['published']\n if request.user.is_reviewer:\n visible_states += ['in_evaluation', 'evaluated', 'reviewed']\n\n courses = semester.course_set.filter(state__in=visible_states).prefetch_related(\"degrees\")\n\n courses = [course for course in courses if course.can_user_see_course(request.user)]\n\n # Annotate each course object with its grades.\n for course in courses:\n course.avg_grade, course.avg_deviation = calculate_average_grades_and_deviation(course)\n\n CourseTuple = namedtuple('CourseTuple', ('courses', 'single_results'))\n\n courses_by_degree = OrderedDict()\n for degree in Degree.objects.all():\n courses_by_degree[degree] = CourseTuple([], [])\n for course in courses:\n if course.is_single_result:\n for degree in course.degrees.all():\n section = calculate_results(course)[0]\n result = section.results[0]\n courses_by_degree[degree].single_results.append((course, result))\n else:\n for degree in course.degrees.all():\n courses_by_degree[degree].courses.append(course)\n\n template_data = dict(semester=semester, courses_by_degree=courses_by_degree)\n return render(request, \"results_semester_detail.html\", 
template_data)\n\n\n@login_required\ndef course_detail(request, semester_id, course_id):\n semester = get_object_or_404(Semester, id=semester_id)\n course = get_object_or_404(semester.course_set, id=course_id, semester=semester)\n\n if not course.can_user_see_results(request.user):\n raise PermissionDenied\n\n sections = calculate_results(course)\n\n if request.user.is_reviewer:\n public_view = request.GET.get('public_view') != 'false' # if parameter is not given, show public view.\n else:\n public_view = request.GET.get('public_view') == 'true' # if parameter is not given, show own view.\n\n # If grades are not published, there is no public view\n if not course.can_publish_grades:\n public_view = False\n\n represented_users = list(request.user.represented_users.all())\n represented_users.append(request.user)\n\n show_grades = request.user.is_reviewer or course.can_publish_grades\n\n # filter text answers\n for section in sections:\n results = []\n for result in section.results:\n if isinstance(result, TextResult):\n answers = [answer for answer in result.answers if user_can_see_text_answer(request.user, represented_users, answer, public_view)]\n if answers:\n results.append(TextResult(question=result.question, answers=answers))\n else:\n results.append(result)\n section.results[:] = results\n\n # filter empty headings\n for section in sections:\n filtered_results = []\n for index in range(len(section.results)):\n result = section.results[index]\n # filter out if there are no more questions or the next question is also a heading question\n if isinstance(result, HeadingResult):\n if index == len(section.results) - 1 or isinstance(section.results[index + 1], HeadingResult):\n continue\n filtered_results.append(result)\n section.results[:] = filtered_results\n\n # remove empty sections\n sections = [section for section in sections if section.results]\n\n # group by contributor\n course_sections_top = []\n course_sections_bottom = []\n contributor_sections = 
OrderedDict()\n for section in sections:\n if section.contributor is None:\n if section.questionnaire.is_below_contributors:\n course_sections_bottom.append(section)\n else:\n course_sections_top.append(section)\n else:\n contributor_sections.setdefault(section.contributor,\n {'total_votes': 0, 'sections': []})['sections'].append(section)\n\n for result in section.results:\n if isinstance(result, TextResult):\n contributor_sections[section.contributor]['total_votes'] += 1\n elif isinstance(result, RatingResult) or isinstance(result, YesNoResult):\n # Only count rating results if we show the grades.\n if show_grades:\n contributor_sections[section.contributor]['total_votes'] += result.total_count\n\n # Show a warning if course is still in evaluation (for reviewer preview).\n evaluation_warning = course.state != 'published'\n\n # Results for a course might not be visible because there are not enough answers\n # but it can still be \"published\" e.g. to show the comment results to contributors.\n # Users who can open the results page see a warning message in this case.\n sufficient_votes_warning = not course.can_publish_grades\n\n course.avg_grade, course.avg_deviation = calculate_average_grades_and_deviation(course)\n\n template_data = dict(\n course=course,\n course_sections_top=course_sections_top,\n course_sections_bottom=course_sections_bottom,\n contributor_sections=contributor_sections,\n evaluation_warning=evaluation_warning,\n sufficient_votes_warning=sufficient_votes_warning,\n show_grades=show_grades,\n reviewer=request.user.is_reviewer,\n contributor=course.is_user_contributor_or_delegate(request.user),\n can_download_grades=request.user.can_download_grades,\n public_view=public_view)\n return render(request, \"results_course_detail.html\", template_data)\n\n\ndef user_can_see_text_answer(user, represented_users, text_answer, public_view=False):\n if public_view:\n return False\n if text_answer.state not in COMMENT_STATES_REQUIRED_FOR_VISIBILITY:\n return 
False\n if user.is_reviewer:\n return True\n\n contributor = text_answer.contribution.contributor\n\n if text_answer.is_private:\n return contributor == user\n\n if text_answer.is_published:\n if text_answer.contribution.responsible:\n return contributor == user or user in contributor.delegates.all()\n\n if contributor in represented_users:\n return True\n if text_answer.contribution.course.contributions.filter(\n contributor__in=represented_users, comment_visibility=Contribution.ALL_COMMENTS).exists():\n return True\n if text_answer.contribution.is_general and text_answer.contribution.course.contributions.filter(\n contributor__in=represented_users, comment_visibility=Contribution.COURSE_COMMENTS).exists():\n return True\n\n return False\n"
},
{
"alpha_fraction": 0.688971757888794,
"alphanum_fraction": 0.6983909010887146,
"avg_line_length": 41.46666717529297,
"blob_id": "297e46b73ba81de780a2fb4367b3b7357d3191e6",
"content_id": "72240104a0f3f16e2266f3ac311228ebef6aa6a4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10192,
"license_type": "permissive",
"max_line_length": 152,
"num_lines": 240,
"path": "/evap/results/tools.py",
"repo_name": "phoeinx/EvaP",
"src_encoding": "UTF-8",
"text": "from collections import namedtuple, defaultdict, OrderedDict\nfrom functools import partial\nfrom math import ceil\nfrom statistics import pstdev, median\n\nfrom django.conf import settings\nfrom django.core.cache import caches\nfrom django.db.models import Sum\n\nfrom evap.evaluation.models import TextAnswer, Contribution, RatingAnswerCounter\nfrom evap.evaluation.tools import questionnaires_and_contributions\n\n\nGRADE_COLORS = {\n 1: (136, 191, 74),\n 2: (187, 209, 84),\n 3: (239, 226, 88),\n 4: (242, 158, 88),\n 5: (235, 89, 90),\n}\n\nCOMMENT_STATES_REQUIRED_FOR_VISIBILITY = [TextAnswer.PRIVATE, TextAnswer.PUBLISHED]\n\n\n# see calculate_results\nResultSection = namedtuple('ResultSection', ('questionnaire', 'contributor', 'label', 'results', 'warning'))\nCommentSection = namedtuple('CommentSection', ('questionnaire', 'contributor', 'label', 'is_responsible', 'results'))\nRatingResult = namedtuple('RatingResult', ('question', 'total_count', 'average', 'deviation', 'counts', 'warning'))\nYesNoResult = namedtuple('YesNoResult', ('question', 'total_count', 'average', 'deviation', 'counts', 'warning', 'approval_count'))\nTextResult = namedtuple('TextResult', ('question', 'answers'))\nHeadingResult = namedtuple('HeadingResult', ('question'))\n\n\ndef avg(iterable):\n \"\"\"Simple arithmetic average function. 
Returns `None` if the length of\n `iterable` is 0 or no items except None exist.\"\"\"\n items = [item for item in iterable if item is not None]\n if len(items) == 0:\n return None\n return float(sum(items)) / len(items)\n\n\ndef mix(a, b, alpha):\n if a is None and b is None:\n return None\n if a is None:\n return b\n if b is None:\n return a\n\n return alpha * a + (1 - alpha) * b\n\n\ndef get_answers(contribution, question):\n return question.answer_class.objects.filter(contribution=contribution, question=question)\n\n\ndef get_number_of_answers(contribution, question):\n answers = get_answers(contribution, question)\n if question.is_rating_question:\n return get_sum_of_answer_counters(answers)\n else:\n return len(answers)\n\n\ndef get_sum_of_answer_counters(answer_counters):\n return answer_counters.aggregate(total_count=Sum('count'))['total_count'] or 0\n\n\ndef get_answers_from_answer_counters(answer_counters):\n answers = []\n for answer_counter in answer_counters:\n for __ in range(0, answer_counter.count):\n answers.append(answer_counter.answer)\n return answers\n\n\ndef get_textanswers(contribution, question, filter_states=None):\n assert question.is_text_question\n answers = get_answers(contribution, question)\n if filter_states is not None:\n answers = answers.filter(state__in=filter_states)\n return answers\n\n\ndef get_counts(question, answer_counters):\n counts = OrderedDict()\n\n possible_answers = range(1, 6)\n if question.is_yes_no_question:\n possible_answers = [1, 5]\n\n # ensure ordering of answers\n for answer in possible_answers:\n counts[answer] = 0\n\n for answer_counter in answer_counters:\n counts[answer_counter.answer] = answer_counter.count\n return counts\n\n\ndef get_results_cache_key(course):\n return 'evap.staff.results.tools.calculate_results-{:d}'.format(course.id)\n\n\ndef calculate_results(course, force_recalculation=False):\n if course.state != \"published\":\n return _calculate_results_impl(course)\n\n cache_key = 
get_results_cache_key(course)\n if force_recalculation:\n caches['results'].delete(cache_key)\n return caches['results'].get_or_set(cache_key, partial(_calculate_results_impl, course))\n\n\ndef _calculate_results_impl(course):\n \"\"\"Calculates the result data for a single course. Returns a list of\n `ResultSection` tuples. Each of those tuples contains the questionnaire, the\n contributor (or None), a list of single result elements, the average grade and\n deviation for that section (or None). The result elements are either\n `RatingResult` or `TextResult` instances.\"\"\"\n\n # there will be one section per relevant questionnaire--contributor pair\n sections = []\n\n # calculate the median values of how many people answered a questionnaire type (lecturer, tutor, ...)\n questionnaire_med_answers = defaultdict(list)\n questionnaire_max_answers = {}\n questionnaire_warning_thresholds = {}\n for questionnaire, contribution in questionnaires_and_contributions(course):\n max_answers = max([get_number_of_answers(contribution, question) for question in questionnaire.rating_questions], default=0)\n questionnaire_max_answers[(questionnaire, contribution)] = max_answers\n questionnaire_med_answers[questionnaire].append(max_answers)\n for questionnaire, max_answers in questionnaire_med_answers.items():\n questionnaire_warning_thresholds[questionnaire] = max(settings.RESULTS_WARNING_PERCENTAGE * median(max_answers), settings.RESULTS_WARNING_COUNT)\n\n for questionnaire, contribution in questionnaires_and_contributions(course):\n # will contain one object per question\n results = []\n for question in questionnaire.question_set.all():\n if question.is_rating_question:\n answer_counters = get_answers(contribution, question)\n answers = get_answers_from_answer_counters(answer_counters)\n\n total_count = len(answers)\n average = avg(answers) if total_count > 0 else None\n deviation = pstdev(answers, average) if total_count > 0 else None\n counts = get_counts(question, 
answer_counters)\n warning = total_count > 0 and total_count < questionnaire_warning_thresholds[questionnaire]\n\n if question.is_yes_no_question:\n if question.is_positive_yes_no_question:\n approval_count = counts[1]\n else:\n approval_count = counts[5]\n results.append(YesNoResult(question, total_count, average, deviation, counts, warning, approval_count))\n else:\n results.append(RatingResult(question, total_count, average, deviation, counts, warning))\n\n elif question.is_text_question:\n answers = get_textanswers(contribution, question, COMMENT_STATES_REQUIRED_FOR_VISIBILITY)\n results.append(TextResult(question=question, answers=answers))\n\n elif question.is_heading_question:\n results.append(HeadingResult(question=question))\n\n section_warning = questionnaire_max_answers[(questionnaire, contribution)] < questionnaire_warning_thresholds[questionnaire]\n\n sections.append(ResultSection(questionnaire, contribution.contributor, contribution.label, results, section_warning))\n\n return sections\n\n\ndef calculate_average_grades_and_deviation(course):\n \"\"\"Determines the final average grade and deviation for a course.\"\"\"\n avg_generic_likert = []\n avg_contribution_likert = []\n dev_generic_likert = []\n dev_contribution_likert = []\n avg_generic_grade = []\n avg_contribution_grade = []\n dev_generic_grade = []\n dev_contribution_grade = []\n\n for __, contributor, __, results, __ in calculate_results(course):\n average_likert = avg([result.average for result in results if result.question.is_likert_question])\n deviation_likert = avg([result.deviation for result in results if result.question.is_likert_question])\n average_grade = avg([result.average for result in results if result.question.is_grade_question])\n deviation_grade = avg([result.deviation for result in results if result.question.is_grade_question])\n\n (avg_contribution_likert if contributor else avg_generic_likert).append(average_likert)\n (dev_contribution_likert if contributor else 
dev_generic_likert).append(deviation_likert)\n (avg_contribution_grade if contributor else avg_generic_grade).append(average_grade)\n (dev_contribution_grade if contributor else dev_generic_grade).append(deviation_grade)\n\n # the final total grade will be calculated by the following formula (GP = GRADE_PERCENTAGE, CP = CONTRIBUTION_PERCENTAGE):\n # final_likert = CP * likert_answers_about_persons + (1-CP) * likert_answers_about_courses\n # final_grade = CP * grade_answers_about_persons + (1-CP) * grade_answers_about_courses\n # final = GP * final_grade + (1-GP) * final_likert\n\n final_likert_avg = mix(avg(avg_contribution_likert), avg(avg_generic_likert), settings.CONTRIBUTION_PERCENTAGE)\n final_likert_dev = mix(avg(dev_contribution_likert), avg(dev_generic_likert), settings.CONTRIBUTION_PERCENTAGE)\n final_grade_avg = mix(avg(avg_contribution_grade), avg(avg_generic_grade), settings.CONTRIBUTION_PERCENTAGE)\n final_grade_dev = mix(avg(dev_contribution_grade), avg(dev_generic_grade), settings.CONTRIBUTION_PERCENTAGE)\n\n final_avg = mix(final_grade_avg, final_likert_avg, settings.GRADE_PERCENTAGE)\n final_dev = mix(final_grade_dev, final_likert_dev, settings.GRADE_PERCENTAGE)\n\n return final_avg, final_dev\n\n\ndef has_no_rating_answers(course, contributor, questionnaire):\n questions = questionnaire.rating_questions\n contribution = Contribution.objects.get(course=course, contributor=contributor)\n return RatingAnswerCounter.objects.filter(question__in=questions, contribution=contribution).count() == 0\n\n\ndef color_mix(color1, color2, fraction):\n return tuple(\n int(round(color1[i] * (1 - fraction) + color2[i] * fraction)) for i in range(3)\n )\n\n\ndef get_grade_color(grade):\n # Can happen if no one leaves any grades. 
Return white because its least likely to cause problems.\n if grade is None:\n return (255, 255, 255)\n grade = round(grade, 1)\n next_lower = int(grade)\n next_higher = int(ceil(grade))\n return color_mix(GRADE_COLORS[next_lower], GRADE_COLORS[next_higher], grade - next_lower)\n\n\ndef get_deviation_color(deviation):\n if deviation is None:\n return (255, 255, 255)\n\n capped_deviation = min(deviation, 2.0) # values above that are very uncommon in practice\n val = int(255 - capped_deviation * 60) # tweaked to look good\n return (val, val, val)\n"
},
{
"alpha_fraction": 0.5196629166603088,
"alphanum_fraction": 0.6966292262077332,
"avg_line_length": 26.384614944458008,
"blob_id": "9c956ac90926e27bb01f8562d69076fd6e1785b9",
"content_id": "823297ef3070e0f70f4a8fb430dcfcc3dc5ff3f1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 356,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 13,
"path": "/requirements.txt",
"repo_name": "phoeinx/EvaP",
"src_encoding": "UTF-8",
"text": "django >= 2.0, < 2.1\nxlrd == 1.1.0\nxlwt == 1.3.0\nxlutils == 2.0.0\npsycopg2 == 2.7.3.2\ndjango-redis == 4.9.0\ndjango-fsm == 2.6.0\ndjango-webtest == 1.9.2\nWebTest == 2.0.29\ngit+git://github.com/vandersonmota/model_mommy.git@b6a6ac179d5a0e1e22265f7fe97391d1afb32ae2#egg=model-mommy\ndjango-extensions == 1.9.9\ndjango-sendfile == 0.3.11\ndjango-compressor == 2.2\n"
},
{
"alpha_fraction": 0.7179487347602844,
"alphanum_fraction": 0.7179487347602844,
"avg_line_length": 26.85714340209961,
"blob_id": "3a88b3fd652101c6f176d3e0ef8da2442778b2a5",
"content_id": "c8edcf9a7007b1e865d203d4765317b919d4363e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 390,
"license_type": "permissive",
"max_line_length": 68,
"num_lines": 14,
"path": "/evap/results/templatetags/results_templatetags.py",
"repo_name": "phoeinx/EvaP",
"src_encoding": "UTF-8",
"text": "from django.template import Library\nfrom evap.results.tools import get_grade_color, get_deviation_color\n\nregister = Library()\n\n\[email protected](name='gradecolor')\ndef gradecolor(grade):\n return 'rgb({}, {}, {})'.format(*get_grade_color(grade))\n\n\[email protected](name='deviationcolor')\ndef deviationcolor(deviation):\n return 'rgb({}, {}, {})'.format(*get_deviation_color(deviation))\n"
}
] | 5 |
noinacoding/RPS-Game | https://github.com/noinacoding/RPS-Game | 97312169c46bc1cdfc577844b0e496597e18a5f0 | 34b5d568eb7a7e2c902375a2780b7ad7bd475aec | 5e992326bb67a51b01fbdf753b7b1d2df7614718 | refs/heads/main | 2023-06-04T11:05:52.543963 | 2021-06-22T05:54:05 | 2021-06-22T05:54:05 | 378,911,651 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4697328507900238,
"alphanum_fraction": 0.47311463952064514,
"avg_line_length": 27.16190528869629,
"blob_id": "842838cf9d6517f6a4b4dc09cccf512c0d3db28c",
"content_id": "b7e04e6de474e3df7c141668e3b1066c902e86b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3395,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 105,
"path": "/main.py",
"repo_name": "noinacoding/RPS-Game",
"src_encoding": "UTF-8",
"text": "# โมดูลที่ต้องใช้\nimport random\nimport time\n\n\n# เล่นอีกรอบ\n\n\ndef replay():\n # ตัวเกมหลัก\n\n def rps():\n if answer == 1:\n return \"Rock\"\n elif answer == 2:\n return \"Paper\"\n elif answer == 3:\n return \"Scissors\"\n\n # เปลี่ยนตัวเลือกย่อยเป็นคำหลัก\n def rename():\n if choice == \"R\" or choice == \"r\" or choice == \"Rock\" or choice == \"rock\":\n return \"Rock\"\n if choice == \"P\" or choice == \"p\" or choice == \"Paper\" or choice == \"paper\":\n return \"Paper\"\n if choice == \"S\" or choice == \"s\" or choice == \"Scissors\" or choice == \"scissors\":\n return \"Scissors\"\n\n # กระบวนการของตัวเลือกค้อน\n def rock():\n if rps() == \"Rock\":\n print(\"Tie\")\n print(\"Since it was tie you need to pick again\")\n replay()\n elif rps() == \"Paper\":\n print(\"You lost\")\n elif rps() == \"Scissors\":\n print(\"You won\")\n\n # กระบวนการของตัวเลือกกระดาษ\n def paper():\n if rps() == \"Paper\":\n print(\"Tie\")\n print(\"Since it was tie you need to pick again\")\n replay()\n elif rps() == \"Scissors\":\n print(\"You lost\")\n elif rps() == \"Rock\":\n print(\"You won\")\n\n # กระบวนการของตัวเลือกกรรไกร\n def scissors():\n if rps() == \"Scissors\":\n print(\"Tie\")\n print(\"Since it was tie you need to pick again\")\n replay()\n elif rps() == \"Rock\":\n print(\"You lost\")\n elif rps() == \"Paper\":\n print(\"You won\")\n\n # ประมวลผลลัพธ์\n def result():\n if rename() == \"Rock\":\n rock()\n elif rename() == \"Paper\":\n paper()\n elif rename() == \"Scissors\":\n scissors()\n\n # รับข้อมูลตัวเลือกจากผู้เล่นและทำการประมวลผลลัพธ์\n choice = str(input(\"Please choose between Rock (R), Paper (P), Scissors (S) \"))\n answer = random.randint(1, 3)\n rps()\n rename()\n\n if rename() == \"Rock\" or rename() == \"Paper\" or rename() == \"Scissors\":\n print(\"You have choose\", rename())\n time.sleep(1)\n print(\"The bot is now picking their choice..\")\n time.sleep(3)\n print(\"The bot have choose\", rps())\n print(\"So the result is..\")\n 
result()\n time.sleep(1.2)\n # เล่นอีกรอบ\n again = str(input(\"Would you like to play again? Yes (Y) or No (N) \"))\n if again == \"Yes\" or again == \"Y\" or again == \"y\" or again == \"yes\":\n print(\"Creating a new game..\")\n print(\"-------------------------------------------------\")\n replay()\n elif again == \"No\" or again == \"N\" or again == \"n\" or again == \"no\":\n print(\"Ending process..\")\n else:\n print(\"Invalid answer\")\n time.sleep(1)\n print(\"Ending process..\")\n else:\n print(\"Invalid choice\")\n replay()\n\n\n# เริ่มเกม\n\nreplay()\n"
}
] | 1 |
ahmetfarukyilmaz/leetcode-solutions | https://github.com/ahmetfarukyilmaz/leetcode-solutions | a6f4c77149fc87eb3818fcf61c57ecde767ff50b | be4efe02c27e4f813f468949347ec8410e5db677 | 61b7d4961f790e7d8868291d0b696a8c65a6efe0 | refs/heads/main | 2023-05-09T06:06:50.106997 | 2021-05-26T19:42:20 | 2021-05-26T19:42:20 | 351,579,747 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6951871514320374,
"alphanum_fraction": 0.7058823704719543,
"avg_line_length": 30.33333396911621,
"blob_id": "9eb72504fbfae567319e389ec38a6cac18c99b1f",
"content_id": "dd5ee679b16927149fd344a87082b202e78e4779",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 187,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 6,
"path": "/sql/Second-Highest-Salary.sql",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "--https://leetcode.com/problems/second-highest-salary/\n\nselect (select distinct Salary\n from Employee\n order by Salary desc\n limit 1 offset 1 ) as SecondHighestSalary"
},
{
"alpha_fraction": 0.768750011920929,
"alphanum_fraction": 0.78125,
"avg_line_length": 21.85714340209961,
"blob_id": "db4927e34ad90d43bb7ca368e4c08953b2e71f19",
"content_id": "ffb2d2d84ff2cba713936e80511eed66a2fb4ab9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 160,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 7,
"path": "/sql/Not-Boring-Movies.sql",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "\n--https://leetcode.com/problems/not-boring-movies/\n\n\nselect id,movie,description,rating\nfrom cinema\nwhere id%2=1 and description!='boring'\norder by rating desc"
},
{
"alpha_fraction": 0.5106382966041565,
"alphanum_fraction": 0.5148935914039612,
"avg_line_length": 17.076923370361328,
"blob_id": "4a7955298ce91e1998f1f74282f8c0ef53503ce0",
"content_id": "4b93f67b4fc1b37f3d13828dfc6ce3989ecb5f51",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 235,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 13,
"path": "/algorithm/best_time_to_buy_and_sell_stock.py",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "import sys\n\n\ndef maxProfit(self, prices):\n min = sys.maxsize\n max_profit = 0\n\n for i in prices:\n if i < min:\n min = i\n elif i - min > max_profit:\n max_profit = i - min\n return max_profit\n"
},
{
"alpha_fraction": 0.7526881694793701,
"alphanum_fraction": 0.7903226017951965,
"avg_line_length": 30,
"blob_id": "35fc415e0cbb4acc21893c732a6dd476cc085a22",
"content_id": "5e9a29988bf3bdbec4edbc2fa9d8fc2892817dc3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 186,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 6,
"path": "/sql/Employees-Earning-More-Than-Their Managers.sql",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "\n--https://leetcode.com/problems/employees-earning-more-than-their-managers/\n\nselect e1.Name as Employee\nfrom Employee e1 join Employee e2 \non e1.managerId=e2.Id \nand e1.Salary>e2.Salary"
},
{
"alpha_fraction": 0.7861271500587463,
"alphanum_fraction": 0.7861271500587463,
"avg_line_length": 20.75,
"blob_id": "32388fbcfc9927e5c9623da9f6d5a0fb63d0c1db",
"content_id": "7e306525b82db6a3fa2cb26b6a8ae475d7b7bd0d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 173,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 8,
"path": "/sql/Customers-Who-Never-Order.sql",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "--https://leetcode.com/problems/customers-who-never-order/\n\nselect Customers.Name as Customers\nfrom Customers\nwhere Customers.Id not in\n(\n select CustomerId from Orders\n)"
},
{
"alpha_fraction": 0.4692307710647583,
"alphanum_fraction": 0.4692307710647583,
"avg_line_length": 27.88888931274414,
"blob_id": "05024a2c8bd9784bac7daf118dc0c6cb5f2240f7",
"content_id": "6990023863045b232a96e1a5f09cd2f6bd028b87",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 260,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 9,
"path": "/algorithm/two_sum.py",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "def twoSum(nums, target):\n result = []\n\n for i in range(len(nums)):\n for j in range(len(nums)):\n if nums[j] + nums[i] == target and i != j:\n result.append(i)\n result.append(j)\n return result\n"
},
{
"alpha_fraction": 0.778761088848114,
"alphanum_fraction": 0.7876105904579163,
"avg_line_length": 18,
"blob_id": "8cec282294ffe826ed79818ccb63039b8d3bacab",
"content_id": "ea7790f5250213681a7816d798c209c1f417b473",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 113,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 6,
"path": "/sql/Duplicate-Emails.sql",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "--https://leetcode.com/problems/duplicate-emails/\n\nselect Email \nfrom Person\ngroup by Email\nhaving count(Email)>1"
},
{
"alpha_fraction": 0.4128686189651489,
"alphanum_fraction": 0.4155496060848236,
"avg_line_length": 27.69230842590332,
"blob_id": "8b473e1c1e5c9dc4922812544efa40aacab5c19f",
"content_id": "4198265a7ae445f3d5afc43217054d5c8d062dd8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 373,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 13,
"path": "/algorithm/valid_parantheses.py",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "class Solution:\n def isValid(self, s):\n parentheses = {\"}\": \"{\", \"]\": \"[\", \")\": \"(\"}\n result = []\n\n for i in s:\n if i in parentheses.values():\n result.append(i)\n elif result and parentheses[i] == result[-1]:\n result.pop()\n else:\n return False\n return result == []\n"
},
{
"alpha_fraction": 0.5207667946815491,
"alphanum_fraction": 0.5399361252784729,
"avg_line_length": 30.299999237060547,
"blob_id": "b8657befce8de504bcb1ac200de5162e5164834d",
"content_id": "3f178f6c2eefe063148bf810d607d147a0c4159c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 313,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 10,
"path": "/algorithm/matrix_diagonal_sum.py",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "\n#https://leetcode.com/problems/matrix-diagonal-sum/\n\nclass Solution:\n def diagonalSum(self, mat: List[List[int]]) -> int:\n sum=0\n if len(mat)%2==1:\n sum-=mat[len(mat)//2][len(mat)//2]\n for i in range(len(mat)):\n sum+=mat[i][len(mat)-i-1]+mat[i][i]\n return sum"
},
{
"alpha_fraction": 0.5721924901008606,
"alphanum_fraction": 0.5828877091407776,
"avg_line_length": 22.375,
"blob_id": "078413f51133a2cbbe83a9684700bd8dd467e964",
"content_id": "807f6faacb6a2e06b248fd584155586e688dd348",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 187,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 8,
"path": "/algorithm/contains_duplicate.py",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "def containsDuplicate(nums):\n sorted_nums = sorted(nums)\n\n for i in range(len(nums) - 1):\n\n if sorted_nums[i] == sorted_nums[i + 1]:\n return True\n return False\n"
},
{
"alpha_fraction": 0.8152866363525391,
"alphanum_fraction": 0.8152866363525391,
"avg_line_length": 30.399999618530273,
"blob_id": "b15fbc26f7e3a8ac67c9b66e3ac27dfeaab18f26",
"content_id": "7c7e34ffd5bd2a4e2087777942ea402206d99ce8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 157,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 5,
"path": "/sql/Combine-Two-Tables.sql",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "\n--https://leetcode.com/problems/combine-two-tables/\n\nselect FirstName,LastName,City,State\nfrom Person left join Address on \nPerson.PersonId=Address.PersonId"
},
{
"alpha_fraction": 0.7599999904632568,
"alphanum_fraction": 0.8057143092155457,
"avg_line_length": 28.16666603088379,
"blob_id": "01e4c16be1653cd80c750f918cb2c55481dd387c",
"content_id": "64b7121b8d97bb9570327b60ad440a352ca9728f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 175,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 6,
"path": "/sql/Rising-Temperature.sql",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "\n--https://leetcode.com/problems/rising-temperature/\n\nselect w2.id\nfrom Weather w1 join Weather w2 on\ndatediff(w2.recordDate,w1.recordDate)=1\nand w2.Temperature>w1.Temperature"
},
{
"alpha_fraction": 0.5365853905677795,
"alphanum_fraction": 0.5365853905677795,
"avg_line_length": 26.66666603088379,
"blob_id": "68c249ff469bd9877214d56eb5172203ab00a506",
"content_id": "a2b7e6e8a24f65b067a6844e1968613a04311ad6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 82,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 3,
"path": "/algorithm/sum_of_two_integers.py",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "def getSum(self, a: int, b: int) -> int:\n numbers = (a, b)\n return sum(a, b)"
},
{
"alpha_fraction": 0.599056601524353,
"alphanum_fraction": 0.599056601524353,
"avg_line_length": 16.75,
"blob_id": "828ab7f711582a477f4b549f6ac4f872b62a2f72",
"content_id": "d388cac7740361c4514615f8897b562731f3497c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 212,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 12,
"path": "/sql/Swap-Salary.sql",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "--https://leetcode.com/problems/swap-salary/\n\nupdate Salary\nset sex = case sex \n when 'm' then 'f'\n when 'f' then 'm'\nend\n\n--ALTERNATIVE SOLUTION\n\nupdate Salary\nset sex = if(sex='m','f','m')"
},
{
"alpha_fraction": 0.4268142580986023,
"alphanum_fraction": 0.46494466066360474,
"avg_line_length": 27.034482955932617,
"blob_id": "d49ee12984564504f729fc87530bfef80f6878a5",
"content_id": "a9ff7c315e50aef32c06d8cc589025967756e0e1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 813,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 29,
"path": "/algorithm/add_two_numbers.py",
"repo_name": "ahmetfarukyilmaz/leetcode-solutions",
"src_encoding": "UTF-8",
"text": "class ListNode:\n def __init__(self, val=0, next=None):\n self.val = val\n self.next = next\n\n\nclass Solution:\n def addTwoNumbers(self, l1, l2):\n l1_string = ''\n while l1 is not None:\n l1_string += str(l1.val)\n l1 = l1.next\n l1_string = l1_string[::-1]\n l2_string = ''\n while l2 is not None:\n l2_string += str(l2.val)\n l2 = l2.next\n l2_string = l2_string[::-1]\n result = str(int(l1_string) + int(l2_string))[::-1]\n result = list(map(int, str(result)))\n l3 = ListNode()\n res = l3\n for i in range(len(result)):\n l3.val = result[i]\n if i == len(result) - 1:\n break\n l3.next = ListNode()\n l3 = l3.next\n return res\n"
}
] | 15 |
s18k/Django-Attendance-Analysis | https://github.com/s18k/Django-Attendance-Analysis | f110d4e989ae012bc0505eb6b87495081669bae0 | e6dddaef9cd5a8679fb0fa6313f59557f9248311 | 4c8ede5c0459356f20bc187892017c5a7de69e2a | refs/heads/master | 2022-07-12T09:21:53.406819 | 2020-05-18T18:37:46 | 2020-05-18T18:37:46 | 257,332,647 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5321716070175171,
"alphanum_fraction": 0.5616621971130371,
"avg_line_length": 25.64285659790039,
"blob_id": "15210ec19f564c8940f8c04e25dc38af481c6339",
"content_id": "d18e3b72cd298f599509a6df0003e0bb2f5cd52a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 746,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 28,
"path": "/analysis/migrations/0002_auto_20200407_1122.py",
"repo_name": "s18k/Django-Attendance-Analysis",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-07 05:52\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('analysis', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='employee',\n name='employee_contact',\n field=models.IntegerField(default=0),\n ),\n migrations.AddField(\n model_name='employee',\n name='employee_email',\n field=models.CharField(default='', max_length=50),\n ),\n migrations.AddField(\n model_name='employee',\n name='employee_image',\n field=models.ImageField(default='', upload_to='analysis/images'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.6050347089767456,
"alphanum_fraction": 0.6276041865348816,
"avg_line_length": 31.885713577270508,
"blob_id": "498a487bf4081e0344c88b13cc08a5d4dc535571",
"content_id": "7d03f9a4e295c8e099e6450a8589aee3b814c7d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1152,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 35,
"path": "/analysis/models.py",
"repo_name": "s18k/Django-Attendance-Analysis",
"src_encoding": "UTF-8",
"text": "from django.db import models\n\n# Create your models here.\nclass Employee(models.Model):\n employee_id = models.IntegerField(default=0)\n employee_name=models.CharField(max_length=200)\n employee_email = models.CharField(max_length=50,default=\"\")\n employee_contact=models.IntegerField(default=0)\n employee_position = models.CharField(max_length=50)\n employee_image = models.ImageField(upload_to=\"analysis/images\",default=\"\")\n\n def __str__(self):\n return (str(self.employee_id)+self.employee_name)\n\n def getname(self):\n\n return self.employee_name\n\n\nclass Document(models.Model):\n monthnumber=models.IntegerField(default=1)\n year=models.IntegerField(default=0)\n document=models.FileField(upload_to=\"analysis/files\",default=\"\")\n\n def __str__(self):\n months = {1: \"January\", 2: \"February\", 3: \"March\", 4: \"April\", 5: \"May\", 6: \"June\", 7: \"July\", 8: \"August\",\n 9: \"September\", 10: \"October\"\n , 11: \"November\", 12: \"December\"}\n s=\"\"\n m=self.monthnumber\n self.month=months[m]\n s+=months[m]\n s+=\" \"\n s+=str(self.year)\n return (s)\n\n"
},
{
"alpha_fraction": 0.5361445546150208,
"alphanum_fraction": 0.5677710771560669,
"avg_line_length": 29.18181800842285,
"blob_id": "f474236be47b3ffa6a842babc0636e6410b22883",
"content_id": "a43379bb6838baf882d4b9bfe58aa66b9df07a19",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 664,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 22,
"path": "/analysis/migrations/0004_document.py",
"repo_name": "s18k/Django-Attendance-Analysis",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-12 06:06\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('analysis', '0003_employee_employee_id'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Document',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('month', models.IntegerField(default=0)),\n ('year', models.IntegerField(default=0)),\n ('document', models.FileField(default='', upload_to='analysis/files')),\n ],\n ),\n ]\n"
},
{
"alpha_fraction": 0.5175257921218872,
"alphanum_fraction": 0.5587629079818726,
"avg_line_length": 21.045454025268555,
"blob_id": "d10387335a349732f6e371f294d5a108162828a5",
"content_id": "98ccce9b3b0e7391faaf812ab25f8eb206e63646",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 485,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 22,
"path": "/analysis/migrations/0005_auto_20200412_1751.py",
"repo_name": "s18k/Django-Attendance-Analysis",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.5 on 2020-04-12 12:21\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('analysis', '0004_document'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='document',\n name='month',\n ),\n migrations.AddField(\n model_name='document',\n name='monthnumber',\n field=models.IntegerField(default=1),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5838781595230103,
"alphanum_fraction": 0.6043537855148315,
"avg_line_length": 35.92042541503906,
"blob_id": "946fef6b312ac2bde0301456e05fbe710936938c",
"content_id": "226d8cfbb383178b644329ea099111f18be1830c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13919,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 377,
"path": "/analysis/views.py",
"repo_name": "s18k/Django-Attendance-Analysis",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom .models import Employee\nfrom django.http import HttpResponse\nimport pandas as pd\nimport datetime\nfrom .models import Document\nfrom math import ceil\ndef index(request):\n months={1:\"January\",2:\"February\",3:\"March\",4:\"April\",5:\"May\",6:\"June\",7:\"July\",8:\"August\",9:\"September\",10:\"October\"\n ,11:\"November\",12:\"December\"}\n employees=Employee.objects.all()\n documents=Document.objects.all()\n print(documents)\n print(employees)\n doc=Document.objects.get(monthnumber=1)\n n=len(employees)\n\n nSlides=n//5 +ceil((n/4)-(n//4))\n params={'employee':employees,'Document':doc,'month':months[1],'no_of_slides':nSlides,'range':range(n)}\n return render(request,'analysis/index.html',params)\ndef about(request):\n return render(request,'analysis/about.html')\n\ndef chosen(request,id):\n id=str(id)\n year=id[:4]\n month=id[4:]\n m=int(month)\n yr=int(year)\n documents=Document.objects.get(monthnumber=m,year=yr)\n months = {1: \"January\", 2: \"February\", 3: \"March\", 4: \"April\", 5: \"May\", 6: \"June\", 7: \"July\", 8: \"August\",\n 9: \"September\", 10: \"October\"\n , 11: \"November\", 12: \"December\"}\n employees = Employee.objects.all()\n params={}\n params['employee']=employees\n params['Document']=documents\n params['month']=months[m]\n s=months[m]+str(yr)\n params['name']=s\n n = len(employees)\n\n nSlides = n // 5 + ceil((n / 4) - (n // 4))\n params['no_of_slides']=nSlides\n params['range']=range(n)\n print(documents)\n\n return render(request,'analysis/index2.html',params)\n\ndef instructions(request):\n return render(request,'analysis/instructions.html')\ndef chose(request):\n documents = Document.objects.all()\n n=len(documents)\n months = [\"January\",\"February\",\"March\",\"April\",\"May\",\"June\",\"July\",\"August\",\"September\",\"October\"\n , \"November\",\"December\"]\n params={'documents':documents,'number':n,'range':range(n),'months':months}\n return 
render(request,'analysis/chose.html',params)\n\ndef search(request):\n query=request.GET.get('search')\n\n emp=Employee.objects.get(employee_name=query).employee_position\n print(emp)\n\n\n return HttpResponse(request,\"Search\")\n\ndef employeem(request,id,m):\n m = str(m)\n year = m[:4]\n month = m[4:]\n m = int(month)\n yr = int(year)\n months = {1: \"January\", 2: \"February\", 3: \"March\", 4: \"April\", 5: \"May\", 6: \"June\", 7: \"July\", 8: \"August\",\n 9: \"September\", 10: \"October\"\n , 11: \"November\", 12: \"December\"}\n name=\"\"\n name+=months[m]\n name+=str(yr)\n name+=\".xlsx\"\n link=\"./media/analysis/files/\"\n link+=name\n df = pd.read_excel(link)\n df['InTime'] = df['InTime'].fillna(\"00:00:00\")\n df['OutTime'] = df['OutTime'].fillna(\"00:00:00\")\n df['ODINTime'] = df['ODINTime'].fillna(\"00:00:00\")\n df['ODOutTime'] = df['ODOutTime'].fillna(\"00:00:00\")\n df['Leave'] = df['Leave'].fillna(\"N\")\n df['INTimestamp'] = df[\"Date\"].astype(str) + \" \" + df[\"InTime\"].astype(str)\n df['OutTimestamp'] = df[\"Date\"].astype(str) + \" \" + df[\"OutTime\"].astype(str)\n df['ODINTimestamp'] = df[\"Date\"].astype(str) + \" \" + df[\"ODINTime\"].astype(str)\n df['ODOutTimestamp'] = df[\"Date\"].astype(str) + \" \" + df[\"ODOutTime\"].astype(str)\n emp_count = df['EmployeeID'].nunique()\n days = df['Date'].nunique()\n loop_count = emp_count * days\n\n l = []\n odleave = []\n leaves = {}\n\n for i in range(0, loop_count):\n intime = datetime.datetime.strptime(df[\"INTimestamp\"][i], '%Y-%m-%d %H:%M:%S')\n outtime = datetime.datetime.strptime(df[\"OutTimestamp\"][i], '%Y-%m-%d %H:%M:%S')\n odintime = datetime.datetime.strptime(df[\"ODINTimestamp\"][i], '%Y-%m-%d %H:%M:%S')\n odouttime = datetime.datetime.strptime(df[\"ODOutTimestamp\"][i], '%Y-%m-%d %H:%M:%S')\n\n total_out = (outtime.hour * 60 + outtime.minute)\n total_int = (intime.hour * 60 + intime.minute)\n total_od_out = (odouttime.hour * 60 + odouttime.minute)\n total_od_int = 
(odintime.hour * 60 + odintime.minute)\n od_leave_time = (total_od_int - total_od_out) / 60\n od_leave_time=float(\"%.2f\" % round(od_leave_time,2))\n working_time = (total_out - total_int) / 60\n working_time = float(\"%.2f\" % round(working_time, 2))\n l.append(working_time)\n odleave.append(od_leave_time)\n # print(intime,outtime,sep=\" \")\n df['Working_time'] = l\n df['ODLeave'] = odleave\n\n grp_weekdays = df.groupby(['EmployeeID', 'Day']).mean()\n grp_total_working_time = df.groupby(['EmployeeID']).sum()\n\n # print(df)\n df11 = df.groupby('EmployeeID')['Leave'].apply(lambda x: (x == 'C').sum()).reset_index(name='Casualleavecount')\n df12 = df.groupby('EmployeeID')['Leave'].apply(lambda x: (x == 'M').sum()).reset_index(name='Medicalleavecount')\n df13 = df.groupby('EmployeeID')['Leave'].apply(lambda x: (x == 'OD').sum()).reset_index(name='ODleavecount')\n params={}\n medical= list(df12[df12['EmployeeID']==id]['Medicalleavecount'])[0]\n casual = list(df11[df11['EmployeeID'] == id]['Casualleavecount'])[0]\n odleave = list(df13[df13['EmployeeID'] == id]['ODleavecount'])[0]\n total_leaves=medical+casual+odleave\n gc = df.groupby(['EmployeeID', 'Day']).agg({'Working_time': ['mean']})\n gc.columns = ['Average_working_hrs']\n gc = gc.reset_index()\n gday = gc[gc['EmployeeID'] == id]\n monday = list(gday[gday['Day'] == 'Monday']['Average_working_hrs'])[0]\n monday = float(\"%.2f\" % round(monday, 2))\n tuesday = list(gday[gday['Day'] == 'Tuesday']['Average_working_hrs'])[0]\n tuesday = float(\"%.2f\" % round(tuesday, 2))\n wednesday =list(gday[gday['Day'] == 'Wednesday']['Average_working_hrs'])[0]\n wednesday = float(\"%.2f\" % round(wednesday, 2))\n thursday = list(gday[gday['Day'] == 'Thursday']['Average_working_hrs'])[0]\n thursday = float(\"%.2f\" % round(thursday, 2))\n friday = list(gday[gday['Day'] == 'Friday']['Average_working_hrs'])[0]\n friday = float(\"%.2f\" % round(friday, 2))\n saturday = list(gday[gday['Day'] == 
'Saturday']['Average_working_hrs'])[0]\n saturday = float(\"%.2f\" % round(saturday, 2))\n l1=(list(df[df['EmployeeID'] == id]['Working_time']))\n g = df.groupby('EmployeeID')\n gp = g.get_group(id)\n gsum = gp.sum()\n totalworking=(gsum['Working_time'])\n totalworking = float(\"%.2f\" % round(totalworking, 2))\n totalodleave=(gsum['ODLeave'])\n dates = (list(df[df['EmployeeID'] == id]['Date']))\n date = []\n for d in dates:\n s = str(d)\n s = s[8:10]\n date.append(s)\n\n\n emp = Employee.objects.get(employee_id=id)\n empid=emp.employee_id\n empname=emp.employee_name\n empposition=emp.employee_position\n empemail=emp.employee_email\n empcontact=emp.employee_contact\n empimage=emp.employee_image\n print(empimage)\n\n params['monthyr']=months[m]+\" \"+str(yr)\n params[emp]=emp\n params['empid']=empid\n params['empname']=empname\n params['empposition']=empposition\n params['empemail']=empemail\n params['empcontact']=empcontact\n params['empimage']=empimage\n\n\n params['totalleaves']=total_leaves\n params['medical']=medical\n params['casual']=casual\n params['odleave']=odleave\n print(odleave)\n all_leaves=[medical,casual,odleave]\n params['all_leaves']=all_leaves\n\n params['monday']=monday\n params['tuesday']=tuesday\n params['wednesday']=wednesday\n params['thursday']=thursday\n params['friday']=friday\n params['saturday']=saturday\n params['monthlyworking']=l1\n params['date']=date\n params['range']=range(len(l1))\n params['totalworking']=totalworking\n params['totalodleave']=totalodleave\n\n params['emp']=emp\n print(\"printing dates and work \")\n print(date)\n print(l1)\n\n days=\"\"\n for i in date:\n days+=str(i)\n days+=\",\"\n days=days[:-1]\n params[\"days\"] = days\n work=\"\"\n for i in l1:\n work+=str(i)\n work+=\",\"\n work=work[:-1]\n params[\"w1\"]=8.3\n params[\"w2\"]=9.3\n params[\"w3\"]=7.3\n params[\"work\"]=work\n\n return render(request, 'analysis/employee.html',params)\ndef employee(request,id):\n\n months = {1: \"January\", 2: \"February\", 
3: \"March\", 4: \"April\", 5: \"May\", 6: \"June\", 7: \"July\", 8: \"August\",\n 9: \"September\", 10: \"October\"\n , 11: \"November\", 12: \"December\"}\n name=\"\"\n name+=months[1]\n name+=str(2020)\n name+=\".xlsx\"\n link=\"./media/analysis/files/\"\n link+=name\n df = pd.read_excel(link)\n df['InTime'] = df['InTime'].fillna(\"00:00:00\")\n df['OutTime'] = df['OutTime'].fillna(\"00:00:00\")\n df['ODINTime'] = df['ODINTime'].fillna(\"00:00:00\")\n df['ODOutTime'] = df['ODOutTime'].fillna(\"00:00:00\")\n df['Leave'] = df['Leave'].fillna(\"N\")\n df['INTimestamp'] = df[\"Date\"].astype(str) + \" \" + df[\"InTime\"].astype(str)\n df['OutTimestamp'] = df[\"Date\"].astype(str) + \" \" + df[\"OutTime\"].astype(str)\n df['ODINTimestamp'] = df[\"Date\"].astype(str) + \" \" + df[\"ODINTime\"].astype(str)\n df['ODOutTimestamp'] = df[\"Date\"].astype(str) + \" \" + df[\"ODOutTime\"].astype(str)\n emp_count = df['EmployeeID'].nunique()\n days = df['Date'].nunique()\n loop_count = emp_count * days\n\n l = []\n odleave = []\n leaves = {}\n\n for i in range(0, loop_count):\n intime = datetime.datetime.strptime(df[\"INTimestamp\"][i], '%Y-%m-%d %H:%M:%S')\n outtime = datetime.datetime.strptime(df[\"OutTimestamp\"][i], '%Y-%m-%d %H:%M:%S')\n odintime = datetime.datetime.strptime(df[\"ODINTimestamp\"][i], '%Y-%m-%d %H:%M:%S')\n odouttime = datetime.datetime.strptime(df[\"ODOutTimestamp\"][i], '%Y-%m-%d %H:%M:%S')\n\n total_out = (outtime.hour * 60 + outtime.minute)\n total_int = (intime.hour * 60 + intime.minute)\n total_od_out = (odouttime.hour * 60 + odouttime.minute)\n total_od_int = (odintime.hour * 60 + odintime.minute)\n od_leave_time = (total_od_int - total_od_out) / 60\n od_leave_time=float(\"%.2f\" % round(od_leave_time,2))\n working_time = (total_out - total_int) / 60\n working_time = float(\"%.2f\" % round(working_time, 2))\n l.append(working_time)\n odleave.append(od_leave_time)\n # print(intime,outtime,sep=\" \")\n df['Working_time'] = l\n df['ODLeave'] = 
odleave\n\n grp_weekdays = df.groupby(['EmployeeID', 'Day']).mean()\n grp_total_working_time = df.groupby(['EmployeeID']).sum()\n\n # print(df)\n df11 = df.groupby('EmployeeID')['Leave'].apply(lambda x: (x == 'C').sum()).reset_index(name='Casualleavecount')\n df12 = df.groupby('EmployeeID')['Leave'].apply(lambda x: (x == 'M').sum()).reset_index(name='Medicalleavecount')\n df13 = df.groupby('EmployeeID')['Leave'].apply(lambda x: (x == 'OD').sum()).reset_index(name='ODleavecount')\n params={}\n medical= list(df12[df12['EmployeeID']==id]['Medicalleavecount'])[0]\n casual = list(df11[df11['EmployeeID'] == id]['Casualleavecount'])[0]\n odleave = list(df13[df13['EmployeeID'] == id]['ODleavecount'])[0]\n total_leaves=medical+casual+odleave\n gc = df.groupby(['EmployeeID', 'Day']).agg({'Working_time': ['mean']})\n gc.columns = ['Average_working_hrs']\n gc = gc.reset_index()\n gday = gc[gc['EmployeeID'] == id]\n monday = list(gday[gday['Day'] == 'Monday']['Average_working_hrs'])[0]\n monday = float(\"%.2f\" % round(monday, 2))\n tuesday = list(gday[gday['Day'] == 'Tuesday']['Average_working_hrs'])[0]\n tuesday = float(\"%.2f\" % round(tuesday, 2))\n wednesday =list(gday[gday['Day'] == 'Wednesday']['Average_working_hrs'])[0]\n wednesday = float(\"%.2f\" % round(wednesday, 2))\n thursday = list(gday[gday['Day'] == 'Thursday']['Average_working_hrs'])[0]\n thursday = float(\"%.2f\" % round(thursday, 2))\n friday = list(gday[gday['Day'] == 'Friday']['Average_working_hrs'])[0]\n friday = float(\"%.2f\" % round(friday, 2))\n saturday = list(gday[gday['Day'] == 'Saturday']['Average_working_hrs'])[0]\n saturday = float(\"%.2f\" % round(saturday, 2))\n l1=(list(df[df['EmployeeID'] == id]['Working_time']))\n g = df.groupby('EmployeeID')\n gp = g.get_group(id)\n gsum = gp.sum()\n totalworking=(gsum['Working_time'])\n totalworking = float(\"%.2f\" % round(totalworking, 2))\n totalodleave=(gsum['ODLeave'])\n dates = (list(df[df['EmployeeID'] == id]['Date']))\n date = []\n for d in 
dates:\n s = str(d)\n s = s[8:10]\n date.append(s)\n\n\n emp = Employee.objects.get(employee_id=id)\n empid=emp.employee_id\n empname=emp.employee_name\n empposition=emp.employee_position\n empemail=emp.employee_email\n empcontact=emp.employee_contact\n empimage=emp.employee_image\n print(empimage)\n params[emp]=emp\n params['monthyr']=months[1]+\" \"+str(2020)\n params['empid']=empid\n params['empname']=empname\n params['empposition']=empposition\n params['empemail']=empemail\n params['empcontact']=empcontact\n params['empimage']=empimage\n\n\n params['totalleaves']=total_leaves\n params['medical']=medical\n params['casual']=casual\n params['odleave']=odleave\n print(odleave)\n all_leaves=[medical,casual,odleave]\n params['all_leaves']=all_leaves\n\n params['monday']=monday\n params['tuesday']=tuesday\n params['wednesday']=wednesday\n params['thursday']=thursday\n params['friday']=friday\n params['saturday']=saturday\n params['monthlyworking']=l1\n params['date']=date\n params['range']=range(len(l1))\n params['totalworking']=totalworking\n params['totalodleave']=totalodleave\n\n params['emp']=emp\n print(\"printing dates and work \")\n print(date)\n print(l1)\n\n days=\"\"\n for i in date:\n days+=str(i)\n days+=\",\"\n days=days[:-1]\n params[\"days\"] = days\n work=\"\"\n for i in l1:\n work+=str(i)\n work+=\",\"\n work=work[:-1]\n params[\"w1\"]=8.3\n params[\"w2\"]=9.3\n params[\"w3\"]=7.3\n params[\"work\"]=work\n\n return render(request, 'analysis/employee.html',params)\n"
}
] | 5 |
swiich/face_recognize | https://github.com/swiich/face_recognize | 51673217ba7a9b975c66864fab37b6bf37dfda03 | d9b016e0b80169805da9f74cb73e9dbba68d8d6b | aea4ea3a5fb97a1d0adaa3c0ba6084cadd0c86be | refs/heads/master | 2021-08-11T19:18:02.882509 | 2017-11-14T03:16:32 | 2017-11-14T03:16:32 | 110,634,500 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5223367810249329,
"alphanum_fraction": 0.5635738968849182,
"avg_line_length": 15.222222328186035,
"blob_id": "f80dd0d25a6c27535be454d273aa597d61e71f1f",
"content_id": "4e41bc743da5448f3a729c2fa3e58702fc0a8e6b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 291,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 18,
"path": "/opencv.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "import cv2\n\nvc = cv2.VideoCapture('test.avi')\n\nc = 1\n\nif vc.isOpened():\n rval, frame = vc.read()\nelse:\n rval = False\ntimF = 1000\nwhile rval:\n rval,frame = vc.read()\n if(c%timF == 0):\n cv2.imwrite('image/'+ str(c) + '.jpg', frame)\n c += 1\n cv2.waitKey(1)\nvc.release()"
},
{
"alpha_fraction": 0.5579196214675903,
"alphanum_fraction": 0.5768321752548218,
"avg_line_length": 25.4375,
"blob_id": "632ed538af9b3ca551f7fb59dd665fe5fb1396ab",
"content_id": "07bdbd5487b52da77bfdd89619b06b70dadf76d5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 423,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 16,
"path": "/main.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "import avHash\nimport hamming\n\nimg_path = ('.\\img\\\\timg1.jpg', '.\\img\\\\timg0.jpg')\n\nif __name__ == '__main__':\n ham = hamming.hamming(avHash.get_hash(img_path[0]), avHash.get_hash(img_path[1]))\n print(avHash.get_hash(img_path[0]))\n print(avHash.get_hash(img_path[1]))\n print(ham)\n if ham == 0:\n print('the same pic')\n elif ham <= 5:\n print(\"image alike\")\n else:\n print('not alike')\n"
},
{
"alpha_fraction": 0.6345447301864624,
"alphanum_fraction": 0.6594528555870056,
"avg_line_length": 39.14754104614258,
"blob_id": "72fd9de9cc55ca7bae1aebcf65c4901e7461e7c1",
"content_id": "dfc6c68aee172c84132812e8037bc5944de05381",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2789,
"license_type": "no_license",
"max_line_length": 175,
"num_lines": 61,
"path": "/faceDetection.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "#-*-coding:utf8-*-#\n\nimport cv2\nfrom PIL import Image, ImageDraw\nimport numpy as np\n\ndef detectFaces(image_name):\n #img = cv2.imread(image_name)\n img = Image.open(image_name)\n # face_cascade = cv2.CascadeClassifier(\"C:\\\\Users\\Asshole\\Anaconda3\\pkgs\\opencv-3.2.0-np112py36_203\\Library\\etc\\haarcascades\\haarcascade_frontalface_default.xml\") #face\n # face_cascade = cv2.CascadeClassifier(\"E:\\Python\\PycharmProjects\\ImgHash\\img\\ma\\\\negdata\\data\\cascade.xml\") #mayun\n face_cascade = cv2.CascadeClassifier(\"E:\\Python\\PycharmProjects\\ImgHash\\img\\\\brand\\\\negdata\\data\\cascade.xml\")\n\n #gray = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)\n img = img.convert('L')\n gray = np.asarray(img)\n '''\n 参数3:scaleFactor - -表示在前后两次相继的扫描中,搜索窗口的比例系数。默认为1.1\n 即每次搜索窗口依次扩大10 %;\n 参数4:minNeighbors - -表示构成检测目标的相邻矩形的最小个数(默认为3个)。\n 如果组成检测目标的小矩形的个数和小于min_neighbors - 1都会被排除。\n 如果min_neighbors为0, 则函数不做任何操作就返回所有的被检候选矩形框,\n 这种设定值一般用在用户自定义对检测结果的组合程序上\n '''\n #1.1 即每次图像尺寸减小比例10%\n #5 每一个目标至少检测到4次才算真的目标\n faces = face_cascade.detectMultiScale(gray, 1.07, 5)\n result = []\n for (x, y, width, height) in faces:\n result.append((x, y, x+width, y+height))\n return result\n\ndef drawFaces(image_name):\n faces = detectFaces(image_name)\n if faces:\n img = Image.open(image_name)\n draw_instance = ImageDraw.Draw(img)\n for (x1,y1,x2,y2) in faces:\n # region = (x1, y1, x2, y2)\n # cropImg = img.crop(region)\n draw_instance.rectangle((x1,y1,x2,y2), outline=(255, 0, 0))\n # cropImg.save('E:\\Python\\PycharmProjects\\ImgHash\\img\\\\faces\\\\'+str(x1)+'.jpg')\n # img.save('drawfaces_'+image_name)\n Image._show(img)\n\nif __name__ == '__main__':\n folder = 'E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\'\n picName = 'brd.jpg'\n path = folder + picName\n drawFaces(path)\n # drawFaces('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\whereisma.jpg')\n # drawFaces('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\arrow.jpg')\n # 
drawFaces('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\bruce.jpg')\n # drawFaces('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\bat.jpg')\n # drawFaces('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\gakki.jpg') #gakki 1.07\n # # drawFaces('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\prison.jpg')\n\n\n\n # drawFaces('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\brd.jpg')\n drawFaces('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\s.jpg')\n"
},
{
"alpha_fraction": 0.5776113271713257,
"alphanum_fraction": 0.584269642829895,
"avg_line_length": 36.546875,
"blob_id": "9ef7a2cef76e46c9eb22f63136a9d7a99e45438f",
"content_id": "ed348ade3f984192fe8561e05988ed37d5c1f70a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2589,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 64,
"path": "/recognition.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "import dlib, os, glob, numpy\nfrom skimage import io\nfrom tkinter import filedialog, messagebox\n\n\ndef descriptor(path):\n detector = dlib.get_frontal_face_detector()\n shape = dlib.shape_predictor('..\\shape_predictor_68_face_landmarks.dat')\n faceRecog = dlib.face_recognition_model_v1('..\\dlib_face_recognition_resnet_model_v1.dat')\n windows = dlib.image_window()\n descriptors = []\n for i in glob.glob(os.path.join(path)):\n print('Processing file: {}'.format(i))\n img = io.imread(i)\n windows.clear_overlay()\n windows.set_image(img)\n detect = detector(img, 1) #人脸检测\n print('Number of faces detected: {}'.format(len(detect)))\n for d in detect:\n sp = shape(img, d) #关键点检测\n windows.clear_overlay()\n windows.add_overlay(d)\n windows.add_overlay(sp)\n face_descriptor = faceRecog.compute_face_descriptor(img, sp) #描述子提取\n arr = numpy.array(face_descriptor)\n descriptors.append(arr)\n return descriptors\n\n\ndef recogFace():\n detector = dlib.get_frontal_face_detector() # 正脸检测器\n shape = dlib.shape_predictor('..\\shape_predictor_68_face_landmarks.dat') # 关键点检测器\n # 人脸识别模型\n faceRecog = dlib.face_recognition_model_v1('..\\dlib_face_recognition_resnet_model_v1.dat')\n # descriptors = []\n descriptors = descriptor('..\\img\\\\faces\\*.jpg')\n #读取待检测图片\n img = io.imread(openFile())\n # img = io.imread('..\\img\\\\faces\\\\torec\\\\6.jpg')\n detect = detector(img, 1)\n dist = []\n for d in detect:\n sp = shape(img, d)\n face_descriptor = faceRecog.compute_face_descriptor(img, sp)\n d_test = numpy.array(face_descriptor)\n #欧氏距离\n for i in descriptors:\n dist_ = numpy.linalg.norm(i - d_test)\n dist.append(dist_)\n candidate = ['施瓦辛格','马云','马云','斯嘉丽约翰逊','施瓦辛格',\n '斯嘉丽约翰逊','奥巴马','奥巴马','奥巴马','山下智久','金正恩','金正恩', \\\n '库里', '库里']\n dt = dict(zip(candidate, dist))\n dt_sorted = sorted(dt.items(), key=lambda d:d[1])\n messagebox.showinfo('whosthis',str(dt_sorted[0][0]))\n # print('its: ',dt_sorted[0][0])\n # dlib.hit_enter_to_continue()\n\ndef openFile():\n op = 
filedialog.askopenfilename(title='打开文件',filetypes = [('Img','*.bmp *.jpg')])\n return op\n\nif __name__ == '__main__':\n recogFace()\n"
},
{
"alpha_fraction": 0.5502462983131409,
"alphanum_fraction": 0.5694581270217896,
"avg_line_length": 31.75806427001953,
"blob_id": "91f0c5268a485a879110a443e5b0f3470a692c34",
"content_id": "a77fb67cb258415d399f4ab08c531b1c4f5099f9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2128,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 62,
"path": "/PCAfail.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "import os, glob, cv2\nimport numpy as np\nfrom PIL import Image\n\n\nfolder = 'E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\ma'\n\n\ndef pca(data, k):\n data = np.float32(np.mat(data))\n rows, cols = data.shape #图像大小\n data_mean = np.mean(data, 0) #均值\n Z = data - np.tile(data_mean, (rows, 1))\n T = Z*Z.T\n D, V = np.linalg.eig(T) #特征值与特征向量\n V1 = V[:, :k] #取前K个特征向量\n V1 = Z.T*V1\n for i in range(k): #特征向量归一化\n V1[:, i] /= np.linalg.norm(V1[:, i])\n data_new = np.dot(Z, V1)\n return data_new, data_mean, V1\n\n\n#covert image to vector\ndef img2vector(filename):\n #img = cv2.imread(filename, 0) #read as 'gray'\n img = Image.open(filename)\n img = img.convert('L')\n rows, cols = img.size\n imgVector = np.zeros((1,rows*cols)) #create a none vectore:to raise speed\n imgVector = np.reshape(img,(1,rows*cols)) #change img from 2D to 1D\n return imgVector\n\n\ndef convertL(file):\n img = Image.open(file)\n img = img.convert('L')\n return img\n\ndef loadImgSet(folder):\n trainData = []; testData = []; yTrain = []; yTest = []\n for k in range(10):\n data = [convertL(d) for d in glob.glob('E:\\\\Python\\\\PycharmProjects\\\\ImgHash\\\\img\\\\ma\\\\*.jpg')]\n trainData.extend(np.ravel(data[i]) for i in range(10))\n testData.extend(np.ravel(data[0]))\n yTest.extend([k]*1)\n yTrain.extend([k]*10)\n return np.array(trainData), np.array(yTrain), np.array(testData), np.array(yTest)\n\n\ndef main():\n xTrain_, yTrain, xTest_, yTest = loadImgSet(folder)\n num_train, num_test = xTrain_.shape[0], xTest_.shape[0]\n\n xTrain, data_mean, V = pca(xTrain_, 10)\n xTest = np.array((xTest_- np.tile(data_mean, (num_test, 1))) * V) #特征脸在特征向量下的数据\n\n yPredict = [yTrain[np.sum((xTrain-np.tile(d, (num_train, 1)))**2, 1).argmin()] for d in xTest]\n print(\"欧式距离法识别率:%.2f%%\"% ((yPredict == np.array(yTest)).mean()*100))\n\nif __name__ == '__main__':\n main()"
},
{
"alpha_fraction": 0.39694657921791077,
"alphanum_fraction": 0.45038166642189026,
"avg_line_length": 15.375,
"blob_id": "b22664bfdedda53c826cb25a10f6f63f4f8e18c9",
"content_id": "a5ec37af9e37cde5b7b93b7d7719ee22a3e0e960",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 131,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 8,
"path": "/hamming.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "def hamming(str, str1):\n ham = 0\n\n for i in range(0, 15):\n if str[i] != str1[i]:\n ham += 1\n\n return ham\n"
},
{
"alpha_fraction": 0.7148014307022095,
"alphanum_fraction": 0.7689530849456787,
"avg_line_length": 29.77777862548828,
"blob_id": "cc358e959d550f9b49a13d068cc5100f53d2a9c0",
"content_id": "ff6995da18901894f0557bb49ce3021618c9dfeb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 277,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 9,
"path": "/ffmpeg.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "import subprocess\n\nffmpegPath = \"E:\\Python\\PycharmProjects\\ImgHash\\\\ffmpeg-20170605-4705edb-win64-static\\\\bin\\\\ffplay.exe \"\ncurMediaPath = \"E:\\Python\\PycharmProjects\\ImgHash\\\\img\\\\test.mp4\"\n\ncmd = ffmpegPath + curMediaPath\n# os.popen(cmd)\n# os.system(cmd)\nsubprocess.call(cmd)\n"
},
{
"alpha_fraction": 0.5798279047012329,
"alphanum_fraction": 0.59560227394104,
"avg_line_length": 31.200000762939453,
"blob_id": "ebca2f8c1551fc397f537e99560a847d1798d055",
"content_id": "77255f65f75837f820cec31c5392565f77c75712",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2132,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 65,
"path": "/recognitionFail.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport cv2, os\n\n\ndef loadImg(path):\n matrix = np.mat(np.zeros((9, 64*64)))\n j = 0\n for i in os.listdir(path):\n if i.split('.')[1] == 'jpg':\n try:\n img = cv2.imread(path+i, 0)\n except:\n print('load %s failed' % i)\n matrix[j, :] = np.mat(img).flatten()\n j += 1\n return matrix\n'''\nstep1: load the face image data ,get the matrix consists of all image\nstep2: average the FaceMat\nstep3: calculate the difference of avgimg and all image data(FaceMat)\nstep4: calculate eigenvector of covariance matrix (because covariance matrix will cause memory error)\n'''\n\ndef recogVector(selecthr = 0.8):\n faceMat = loadImg('E:\\Python\\PycharmProjects\\ImgHash\\img\\m').T ######\n avgImg = np.mean(faceMat, 1)\n diffTrain = faceMat - avgImg\n eigVals, eigVects = np.linalg.eig(np.mat(diffTrain.T * diffTrain))\n eigSortIndex = np.argsort(-eigVals)\n for i in range(np.shape(faceMat)[1]): ########\n if (eigVals[eigSortIndex[:i]]/eigVals.sum()).sum() >= selecthr:\n eigSortIndex = eigSortIndex[:i]\n break\n covVects = diffTrain * eigVects[:,eigSortIndex] #the eigenvector of covariance matrix\n #avgImg均值图像,covVects协方差矩阵的特征向量,diffTrain偏差矩阵\n return avgImg, covVects, diffTrain\n\n\ndef whosthis(oringinImg, faceVector, avgImg, difftrain):\n diff = oringinImg.T - avgImg\n wVec = faceVector.T * diff\n res = 0\n resVal = np.inf\n for i in range(9):\n trainVec = faceVector.T * difftrain[:,i]\n if (np.array(wVec - trainVec)**2).sum() < resVal:\n res = i\n resVal = (np.array(wVec - trainVec)**2).sum()\n return res+1\n\n\ndef similar(oriImg):\n avgImg, faceVector, diffTrain = recogVector()\n oriImg = cv2.imread(oriImg, 0)\n gray = np.mat(oriImg).flatten()\n if whosthis(gray, faceVector, avgImg, diffTrain) == 1:\n return True\n else:\n return False\n\nif __name__ == '__main__':\n if similar('D:\\\\6.bmp'):\n print('1111')\n else:\n print('0')"
},
{
"alpha_fraction": 0.6728110313415527,
"alphanum_fraction": 0.7165898680686951,
"avg_line_length": 27.933332443237305,
"blob_id": "cae4ba362c2b5f870499abaf582d397832692d8e",
"content_id": "f7bc8ab3207877dc78ec75d739fb05163a9153be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1685,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 30,
"path": "/avHash.py",
"repo_name": "swiich/face_recognize",
"src_encoding": "UTF-8",
"text": "#coding:utf-8\n\n'''\n第一步,缩小尺寸。\n将图片缩小到8×8的尺寸,总共64个像素。这一步的作用是去除图片的细节,只保留结构、明暗等基本信息,摒弃不同尺寸、比例带来的图片差异。\n第二步,简化色彩。\n将缩小后的图片,转为64级灰度。也就是说,所有像素点总共只有64种颜色。\n第三步,计算平均值。\n计算所有64个像素的灰度平均值。\n第四步,比较像素的灰度。\n将每个像素的灰度,与平均值进行比较。大于或等于平均值,记为1;小于平均值,记为0。\n第五步,计算哈希值。\n将上一步的比较结果,组合在一起,就构成了一个64位的整数,这就是这张图片的指纹。组合的次序并不重要,只要保证所有图片都采用同样次序就行了。\n得到指纹以后,就可以对比不同的图片,看看64位中有多少位是不一样的。在理论上,这等同于计算“汉明距离”(Hamming distance)。如果不相同的数据位不超过5,就说明两张图片很相似;如果大于10,就说明这是两张不同的图片。\n'''\n\nfrom PIL import Image\n\ndef get_hash(img_path):\n img = Image.open(img_path)\n #antialias 抗锯齿\n #convert L为转换为灰度图像\n img = img.resize((8, 8), Image.ANTIALIAS).convert('L')\n #像素平均值\n avg = sum(list(img.getdata()))/64.0\n\n #将avg和每个像素比较,得到长度为64的字符串\n str = ''.join(map(lambda i: '0' if i < avg else '1', img.getdata()))\n #str切割,每4个字符一组,转成16进制字符\n return ''.join(map(lambda x: '%x' % int(str[x:x+4], 2), range(0, 64, 4)))\n"
}
] | 9 |
banicamarian/PAIC_project | https://github.com/banicamarian/PAIC_project | 084b2b707a678a9c78c21a9e8bc65e9b0965767d | 54aca918056e78221fde19379f1128c291bb423d | f4d51aa0458496fc4db20746fb7184c2b71e2008 | refs/heads/master | 2020-12-07T08:07:53.560724 | 2020-01-08T23:44:03 | 2020-01-08T23:44:03 | 232,680,269 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4599916338920593,
"alphanum_fraction": 0.49392542243003845,
"avg_line_length": 32.61971664428711,
"blob_id": "dcc95a5575ca853adf8be45f4f2df15ee04a82c2",
"content_id": "e1f6a5dbf9188a6eaa972a33bcff031bd9c883c3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2387,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 71,
"path": "/NEAVF.py",
"repo_name": "banicamarian/PAIC_project",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport time\n\ndef AVD(kernel, k_size):\n \"\"\"\n Aggregated Vector Distance(AVD)\n :param kernel: Window of pixels.\n :param k_size: Numbers of aggregated vector distance(number of pixels).\n :return: Computed aggregated vector distance.\n \"\"\"\n R = np.zeros(9)\n for i in range(0, k_size):\n for j in range(1, k_size):\n R[i] += np.sqrt((kernel[i][0] - kernel[j][0])**2 + (kernel[i][1] - kernel[j][1])**2 + (kernel[i][2] - kernel[j][2])**2)\n return R\n\ndef DALS(kernel, index):\n \"\"\"\n :param kernel: Here we select only the third index which represents R value.\n :param index: The order of the central pixel.\n :return: Distribution adapted local similarity.\n \"\"\"\n lambda_used = 1\n F = ((1 / 4) * (kernel[4][3] - kernel[0][3]) + lambda_used) / ((1 / index) * (kernel[index][3] - kernel[0][3]) + 1)\n return F\n\ndef NEAVF(image):\n \"\"\"\n :param image: Image with noise.\n :return: np.array Filtrated image.\n \"\"\"\n gamma = 0.5\n delta = 9\n\n start = time.time()\n row, col, _ = image.shape\n img_out = image.astype(np.float32).copy()\n img_result = image.astype(np.float32).copy()\n F = np.ones((row, col))\n\n for i in range(1, row - 1):\n for j in range(1, col - 1):\n kernel = np.zeros((9, 4))\n kernel[:, 0:3] = image[i - 1:i + 2, j - 1:j + 2, :].reshape((9, 3))\n kernel[[0, 4]] = kernel[[4, 0]]\n CP = kernel[0]\n kernel[:, 3] = AVD(kernel, 9)\n kernel = kernel[kernel[:, 3].argsort(kind='mergesort')]\n img_result[i][j][0] = kernel[0][0]\n img_result[i][j][1] = kernel[0][1]\n img_result[i][j][2] = kernel[0][2]\n for k in range(9):\n if CP[0] == kernel[k][0] and CP[1] == kernel[k][1] and CP[2] == kernel[k][2]:\n l = k\n break\n if l >= 4:\n F[i][j] = DALS(kernel, l)\n else:\n F[i][j] = 1\n\n for i in range(1, row - 1):\n for j in range(1, col - 1):\n H = np.sum(np.power(F[i - 1:i + 2, j - 1:j + 2].reshape(9), gamma))\n if H >= delta:\n img_out[i][j][:] = image[i][j][:]\n else:\n img_out[i][j][:] = img_result[i][j][:]\n\n stop = 
time.time()\n print(\"Time NEAVF = {0}\".format(stop-start))\n return img_out\n"
},
{
"alpha_fraction": 0.5983865261077881,
"alphanum_fraction": 0.6134689450263977,
"avg_line_length": 32.151161193847656,
"blob_id": "b932b698c3bf4ce8ce96d8c50c1ae5d6754a9cde",
"content_id": "1daeef9e03404c119dc21e30800adfa994188eb2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5702,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 172,
"path": "/utils.py",
"repo_name": "banicamarian/PAIC_project",
"src_encoding": "UTF-8",
"text": "import glob\nimport cv2\nimport numpy as np\nfrom skimage.transform import resize\nimport math\nimport matplotlib.pyplot as plt\n\nplt.rcParams.update({'figure.max_open_warning': 0})\n\ndef read_images(path):\n \"\"\"\n :param path: Path to the image dataset.\n :return: A list of images.\n \"\"\"\n files = []\n extension = ['jpg', 'JPG']\n [files.extend(glob.glob(path + \"/*.\" + e)) for e in extension]\n img_list = [cv2.imread(file) for file in files]\n return img_list\n\ndef resize_img(img_list, size):\n \"\"\"\n :param img_list: List with images.\n :param size: Wanted size.\n :return: Resized image.\n \"\"\"\n index = 0\n for img in img_list:\n img_list[index] = resize(img, size).astype(np.float32)\n index += 1\n return img_list\n\ndef show_image_list(img_list, figure_name):\n \"\"\"\n :param img_list: List of images.\n :param figure_name: Name of the final figure.\n :return: All the images in one figure.\n \"\"\"\n index = 1\n img_size = len(img_list)\n plot_dim = math.ceil(math.sqrt(img_size))\n plt.figure(figure_name, figsize=(15, 15), dpi=100)\n for img in img_list:\n plt.subplot(plot_dim, plot_dim, index), plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n plt.title(\"Image {0}\".format(index))\n index += 1\n plt.show()\n plt.close('all')\n\ndef kernel_c(img, window_size, i, j, channel):\n \"\"\"\n Select from an image a window centered at (i, j), with the dimension of window_size.\n :param img: Source image.\n :param window_size: The size of window(window_size*window_size matrix).\n :param i: Horizontal coordinate of the center.\n :param j: Vertical coordinate of the center.\n :param channel: If the image is RGB, select the channel.\n :return: The centered window.\n \"\"\"\n dim = window_size // 2\n return img[i - dim:i + dim + 1, j - dim:j + dim + 1, channel]\n\ndef kernel(img, window_size, i, j):\n \"\"\"\n Select from an image a window centered at (i, j), with the dimension of window_size.\n :param img: Source image.\n :param window_size: The 
size of window(window_size*window_size matrix).\n :param i: Horizontal coordinate of the center.\n :param j: Vertical coordinate of the center.\n :return: The centered window.\n \"\"\"\n dim = window_size // 2\n return img[i - dim:i + dim + 1, j - dim:j + dim + 1]\n\ndef gaussian_noise(image, sigma):\n \"\"\"\n :param image: Source image.\n :param sigma: Measure of the amount of variation or dispersion of a pixel.\n :return: np.array: Image with gaussian noise.\n \"\"\"\n row, col, ch = image.shape\n mean = 0\n gauss = np.random.normal(mean, sigma, (row, col, ch))\n gauss = gauss.reshape(row, col, ch)\n noisy_image = image + gauss\n noisy_image = np.clip(noisy_image, 0, 1)\n return np.float32(noisy_image)\n\ndef salt_and_pepper(image, total_area):\n \"\"\"\n :param image: Initial image.\n :param total_area: Area affected by noise.\n :return: np.array Image with salt an pepper noise.\n \"\"\"\n amount = total_area / 2\n row, col, ch = image.shape\n s_vs_p = 0.5\n out = np.copy(image)\n # Salt mode\n num_salt = np.ceil(amount * image.size * s_vs_p)\n coords = [np.random.randint(0, i - 1, int(num_salt)) for i in image.shape]\n out[tuple(coords)] = 1\n\n # Pepper mode\n num_pepper = np.ceil(amount * image.size * (1. 
- s_vs_p))\n coords = [np.random.randint(0, i, int(num_pepper)) for i in image.shape]\n out[tuple(coords)] = 0\n return out\n\ndef median_filter(image):\n \"\"\"\n :param image: The input image.\n :return: numpy array: Image without salt and pepper noise.\n \"\"\"\n row, col, ch = image.shape\n out_image = image.astype(np.float32).copy()\n for i in range(1, row - 1):\n for j in range(1, col - 1):\n R = np.sort(kernel_c(image, 3, i, j, 0), axis=None)\n G = np.sort(kernel_c(image, 3, i, j, 1), axis=None)\n B = np.sort(kernel_c(image, 3, i, j, 2), axis=None)\n out_image[i, j, 0] = R[4]\n out_image[i, j, 1] = G[4]\n out_image[i, j, 2] = B[4]\n return out_image\n\ndef mean_filter(image):\n \"\"\"\n :param image: The input image.\n :return: numpy array: Image without gaussian noise.\n \"\"\"\n row, col, ch = image.shape\n out_image = image.astype(np.float32).copy()\n for i in range(1, row - 1):\n for j in range(1, col - 1):\n out_image[i, j, 0] = np.sum(kernel_c(image, 3, i, j, 0), axis=None) / 9\n out_image[i, j, 1] = np.sum(kernel_c(image, 3, i, j, 1), axis=None) / 9\n out_image[i, j, 2] = np.sum(kernel_c(image, 3, i, j, 2), axis=None) / 9\n return out_image\n\ndef MSE(initial_image, final_image):\n \"\"\"\n Mean Squared Error(MSE).\n :param initial_image: Image without noise.\n :param final_image: Image affected by noise.\n :return: Value of mean squared error.\n \"\"\"\n return (np.square(initial_image - final_image)).mean(axis=None)\n\ndef PSNR(initial_image, final_image):\n \"\"\"\n Peak Signal to Noise Ratio\n :param initial_image: Image without noise.\n :param final_image: Image affected by noise.\n :return:\n \"\"\"\n mse = MSE(initial_image, final_image)\n if mse == 0:\n return 100\n PIXEL_MAX = 1.0\n return 20 * math.log10(PIXEL_MAX / math.sqrt(mse))\n\ndef MAE(initial_image, final_image):\n \"\"\"\n Mean Absolute Error is the average vertical distance between each point and the identity line.\n :param initial_image: Image without noise.\n :param final_image: 
Image affected by noise.\n :return: Value of mean absolute value.\n \"\"\"\n row, col, ch = initial_image.shape\n dimension = row * col * ch\n return (np.sum(np.abs(initial_image - final_image))) / dimension\n"
},
{
"alpha_fraction": 0.6806282997131348,
"alphanum_fraction": 0.764397919178009,
"avg_line_length": 37.20000076293945,
"blob_id": "cfc2776a7350b46e8879b83dbbd9ffb47e4624a1",
"content_id": "feba3e45a5f6f0a10e043f567ac3f37ef948f644",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 191,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 5,
"path": "/README.md",
"repo_name": "banicamarian/PAIC_project",
"src_encoding": "UTF-8",
"text": "# PAIC_project\n## NEAVF implementation --> project in PyCharm\n\n## *This project was made following this article:* \nLink: [https://www.sciencedirect.com/science/article/pii/S1077201405000653]\n"
},
{
"alpha_fraction": 0.6789010763168335,
"alphanum_fraction": 0.7035164833068848,
"avg_line_length": 44.049503326416016,
"blob_id": "88af84acb4bc8d53d301d89e4a827fb90c7c11b1",
"content_id": "581f4f8090d0f502f5aca62f1b041b79a6741b0b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4550,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 101,
"path": "/main.py",
"repo_name": "banicamarian/PAIC_project",
"src_encoding": "UTF-8",
"text": "\"\"\"\nSAA - self adaptive algorithm\nFAS - fast adaptive similarity filter\nImages from https://pixabay.com/images/search/\n\"\"\"\nimport time\n\nimport utils\nimport constants\nimport NEAVF\n\nstart = time.time()\n\n# Read, resize and plot images\nsize = (300, 400)\nimg_list = utils.read_images(constants.DATASET_PATH)\nimg_list = utils.resize_img(img_list, size)\n# utils.show_image_list(img_list, \"Initial images!\")\n\nimg_list_gauss = []\nimg_list_impulsive = []\nindex = 0\nfor img in img_list:\n img_list_gauss.append(utils.gaussian_noise(img, 0.1))\n img_list_impulsive.append(utils.salt_and_pepper(img, 0.1))\n index += 1\n# utils.show_image_list(img_list_gauss, \"Images with gaussian noise! 0.1\")\n# utils.show_image_list(img_list_impulsive, \"Image with salt and pepper noise! 0.1\")\n\n# Two images with 2 values of noise\nimg_list_gauss_005 = []\nimg_list_gauss_015 = []\nimg_list_impulsive_005 = []\nimg_list_impulsive_015 = []\nfor i in range(2):\n # Make sure we have the same dimension of the list to compute the errors(SNR, MAE, PSNR)\n img_list.append(img_list[i])\n img_list_gauss_005.append(utils.gaussian_noise(img_list[i], 0.1))\n img_list_gauss_015.append(utils.gaussian_noise(img_list[i], 0.15))\n img_list_impulsive_005.append(utils.salt_and_pepper(img_list[i], 0.05))\n img_list_impulsive_015.append(utils.salt_and_pepper(img_list[i], 0.15))\n# utils.show_image_list(img_list_gauss_005, \"Images with gaussian noise! 0.05\")\n# utils.show_image_list(img_list_gauss_015, \"Images with gaussian noise! 0.15\")\n# utils.show_image_list(img_list_impulsive_005, \"Image with salt and pepper noise! 0.05\")\n# utils.show_image_list(img_list_impulsive_015, \"Image with salt and pepper noise! 
0.15\")\n\n\nimg_list_impulsive = img_list_impulsive + img_list_impulsive_005 + img_list_impulsive_015\nimg_list_impulsive_median = []\nimg_list_impulsive_neavf = []\nindex = 0\nfor img in img_list_impulsive:\n img_list_impulsive_median.append(utils.median_filter(img_list_impulsive[index]))\n img_list_impulsive_neavf.append(NEAVF.NEAVF(img_list_impulsive[index]))\n index += 1\n# utils.show_image_list(img_list_impulsive_median, \"Image with salt and pepper after median filter\")\n# utils.show_image_list(img_list_impulsive_neavf, \"Image with salt and pepper after NEAVF filter\")\n\n\nimg_list_gauss = img_list_gauss + img_list_gauss_005 + img_list_gauss_015\nimg_list_gauss_mean = []\nimg_list_gauss_neavf = []\nindex = 0\nfor img in img_list_gauss:\n img_list_gauss_mean.append(utils.mean_filter(img_list_gauss[index]))\n img_list_gauss_neavf.append(NEAVF.NEAVF(img_list_gauss[index]))\n index += 1\n# utils.show_image_list(img_list_gauss_mean, \"Image with gauss after mean filter!\")\n# utils.show_image_list(img_list_gauss_neavf, \"Image with gauss after NEAVF filter!\")\n\n# Noise measure\npsnr_gauss = []\npsnr_impulsive = []\npsnr_NEAVF_impulsive = []\npsnr_NEAVF_gauss = []\nmae_gauss = []\nmae_impulsive = []\nmae_NEAVF_impulsive = []\nmae_NEAVF_gauss = []\nimg_list.append(img_list[0])\nimg_list.append(img_list[1])\nfor i in range(len(img_list)):\n psnr_gauss.append(utils.PSNR(img_list[i], img_list_gauss_mean[i]))\n psnr_impulsive.append(utils.PSNR(img_list[i], img_list_impulsive_median[i]))\n psnr_NEAVF_impulsive.append(utils.PSNR(img_list[i], img_list_impulsive_neavf[i]))\n psnr_NEAVF_gauss.append(utils.PSNR(img_list[i], img_list_gauss_neavf[i]))\n print(\"PSNR for Image {0} after gaussian noise + filter(median) = {1};\".format(i, psnr_gauss[i]))\n print(\"PSNR for Image {0} after impulsive noise + filter(median) = {1};\".format(i, psnr_impulsive[i]))\n print(\"PSNR for Image {0} after impulsive noise + filter(NEAVF) = {1};\".format(i, psnr_NEAVF_impulsive[i]))\n 
print(\"PSNR for Image {0} after gauss noise + filter(NEAVF) = {1};\".format(i, psnr_NEAVF_gauss[i]))\n mae_gauss.append(utils.MAE(img_list[i], img_list_gauss[i]))\n mae_impulsive.append(utils.MAE(img_list[i], img_list_impulsive[i]))\n mae_NEAVF_impulsive.append(utils.MAE(img_list[i], img_list_impulsive_neavf[i]))\n mae_NEAVF_gauss.append(utils.MAE(img_list[i], img_list_gauss_neavf[i]))\n print(\"MAE for Image {0} after gaussian noise + filter(mean) = {1};\".format(i, mae_gauss[i]))\n print(\"MAE for Image {0} after impulsive noise + filter(median) = {1};\".format(i, mae_impulsive[i]))\n print(\"MAE for Image {0} after impulsive noise + filter(NEAVF) = {1};\".format(i, mae_NEAVF_impulsive[i]))\n print(\"MAE for Image {0} after gaussian noise + filter(NEAVF) = {1};\".format(i, mae_NEAVF_gauss[i]))\n\nend = time.time()\nprint(\"Total time = {0} seconds.\".format(end - start))\n"
},
{
"alpha_fraction": 0.739130437374115,
"alphanum_fraction": 0.739130437374115,
"avg_line_length": 23,
"blob_id": "3d413c2591a1967ee96ea751f7b22e308efa4b2e",
"content_id": "03ac15167fcbcfa3ece15dc27908498f61947ae3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 23,
"license_type": "no_license",
"max_line_length": 23,
"num_lines": 1,
"path": "/constants.py",
"repo_name": "banicamarian/PAIC_project",
"src_encoding": "UTF-8",
"text": "DATASET_PATH = \"images\""
}
] | 5 |
leovd100/Django-controle-de-gastos | https://github.com/leovd100/Django-controle-de-gastos | cb309ee1df57342c0f49e7f10bc8045db2bcdf90 | 0ef98bca4dc8e7af625c67343b8b951386aa7eaa | 6abb8a5a2ee0f72ce6dfdbb7453eda9e05ef2e05 | refs/heads/master | 2022-10-12T01:32:55.114058 | 2020-06-05T14:31:12 | 2020-06-05T14:31:12 | 268,777,909 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7424242496490479,
"alphanum_fraction": 0.7424242496490479,
"avg_line_length": 27.285715103149414,
"blob_id": "be87486142d8dfbdf90dbc81bfbd936980efd370",
"content_id": "3af63d86871bc6fc81ea904409f9730a2cd2ffab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 198,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 7,
"path": "/contas/forms.py",
"repo_name": "leovd100/Django-controle-de-gastos",
"src_encoding": "UTF-8",
"text": "from django.forms import ModelForm\nfrom .models import Transacao\n\nclass Constru_Form (ModelForm):\n\tclass Meta:\n\t\tmodel = Transacao\n\t\tfields = ['data','descricao','valor','categoria','observacoes'] "
},
{
"alpha_fraction": 0.6972201466560364,
"alphanum_fraction": 0.6994740962982178,
"avg_line_length": 21.200000762939453,
"blob_id": "616fcb8640e8dac57cd2414454ceffe5d8e3dee0",
"content_id": "89279799ba1c7d820b4fa9d88928f3e4bc20832b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1331,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 60,
"path": "/contas/views.py",
"repo_name": "leovd100/Django-controle-de-gastos",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render, redirect\nfrom django.http import HttpResponse\nfrom datetime import datetime\nfrom .models import Transacao\nfrom .forms import Constru_Form\n\n\n\n# Create your views here.\ndef home(request):\n\tdata = {\n\t\t'trasacoes':['t1','t2','t3']\n\t}\n\t#data_today = f'{datetime.now().day}/{datetime.now().month}/{datetime.now().year}'\n\n\tdata['agora'] = datetime.now().strftime('%d/%m/%Y')\n\n\t#html = '<html><body>Data e Hora atual %s</body></html>'% agora\n\treturn render(request, 'contas/home.html',data)\n\n\n\ndef listagem(request):\n\tdata = {}\n\tdata['transacao'] = Transacao.objects.all()\n\treturn render(request, 'contas/lista.html',data)\n\n\ndef newTransfer(request):\n\tdata = {}\n\tformulario = Constru_Form(request.POST or None)\n\n\tif formulario.is_valid():\n\t\tformulario.save()\n\t\treturn redirect('url_lista')\n\n\n\tdata['form'] = formulario\n\treturn render(request, 'contas/form.html',data)\n\ndef update(request, primary):\n\tdata = {}\n\ttransacao = Transacao.objects.get(pk=primary)\n\tform = Constru_Form(request.POST or None, instance=transacao)\n\n\tif form.is_valid():\n\t\tform.save()\n\t\treturn redirect('url_lista')\n\n\n\tdata['form'] = form\n\tdata['obj'] = transacao\n\treturn render(request, 'contas/form.html',data)\n\n\n\ndef delete(request, pk):\n\tif form.is_valid():\n\t\tTransacao.objects.get(pk=pk).delete()\n\treturn redirect('url_lista')"
},
{
"alpha_fraction": 0.7397576570510864,
"alphanum_fraction": 0.7570686936378479,
"avg_line_length": 27.393442153930664,
"blob_id": "eafcde0d23f1f2598698afe37adef99d5590ad9b",
"content_id": "7d2da2446d5231f1b6749ee94052b034f073370b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1752,
"license_type": "no_license",
"max_line_length": 138,
"num_lines": 61,
"path": "/README.md",
"repo_name": "leovd100/Django-controle-de-gastos",
"src_encoding": "UTF-8",
"text": "<h1>Controle de gasto Python Django</h1>\n\n<h2>Projeto controle de gastos com Python e Django.</h2>\n\n\n\n<h3>Pastas criadas no desenvolvimento do projeto. </h3>\nVenv - Virtualenv com Django instalado e db.sqlite3 \n\n\tpython -m venv venv\n\n<h3>Dentro da vm é instalado o Django com o comando:</h3>\n\n\tpip install django \n\n<h3>Contas - App</h3>\n\n\tpython manage.py startapp contas\n\n<h3>Controle_gastos - projeto Django, para cria-lo , devemos digitar: </h3>\n\n\tdjango-admin startproject controle_gastos\n\n\n<h3>db.sqlite3 - O django vem com o banco de dados SQLite 3, quando intalado ele carrega várias tabélas para serem utilizadas.</h3>\n\n\tpython manage.py migrate\n\n\nPara acessar a aplicação após uma modificação no banco de dodos utiliza o\ncomando migrations\npython manage.py migrate e makemigrations \n\t\nmakemigrations - responsável por criar novas migrações.\n\nmigrations - aplica as migrações.\n\nDocumentação - migrations no Django \n\n[Migrations - Django documentation ](http://https://docs.djangoproject.com/en/3.0/topics/migrations/ \"Migrations - Django documentation \")\n\n\n\n\t\n\tpython manage.py makemigrations\n\tpython manage.py migrate \n\tmanage.py runserver - > Inicia o server \n\n<h3>Formulario feito em Django para captura de dados</h3>\n\n\n\n<h4>Atualização:</h4>\n\tImplementacão de CSS na página HOME\n\t\n\n\n\t\nPara usar CSS no Django, devemos criar uma pasta chamada Static onde dentro dela podemos manipular os arquivos CSS e JS.\n\n\n\n"
},
{
"alpha_fraction": 0.7437722682952881,
"alphanum_fraction": 0.7437722682952881,
"avg_line_length": 27,
"blob_id": "861192ecb849ebdeac8a67cc694c57069d71a3d4",
"content_id": "cb0acf94c94447facc839f3cb675d83a66e2aa1e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 281,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 10,
"path": "/controle_gastos/contas/views.py",
"repo_name": "leovd100/Django-controle-de-gastos",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.http import HttpResponse\nimport datetime\n\n\n# Create your views here.\ndef home(requests):\n\tagora = datetime.datetime.now()\n\t#html = '<html><body>Data e Hora atual %s</body></html>'% agora\n\treturn render(requests, 'contas/home.html')\n\n"
},
{
"alpha_fraction": 0.7341576218605042,
"alphanum_fraction": 0.7465224266052246,
"avg_line_length": 25.91666603088379,
"blob_id": "6be6dda5803b387b21b0e9863cc640871bec2f66",
"content_id": "a7446eb0b57c3bd5fcdb93f5a85132f52889c1a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 647,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 24,
"path": "/contas/models.py",
"repo_name": "leovd100/Django-controle-de-gastos",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom random import randint\n\n# Create your models here.\nclass Categoria(models.Model):\n\tnome = models.CharField(max_length = 100)\n\tdt_criacao = models.DateTimeField(auto_now_add = True)\n\n\tdef __str__(self):\n\t\treturn self.nome\n\n\t\t\n\nclass Transacao(models.Model):\n\tdata = models.DateTimeField()\n\tdescricao = models.CharField(max_length = 200)\n\tvalor = models.DecimalField(max_digits=7,decimal_places=2)\n\tcategoria = models.ForeignKey(Categoria,on_delete=models.CASCADE)\n\tobservacoes = models.TextField(null=True, blank=True)\n\tclass Meta:\n\t\tverbose_name_plural = 'Transacoes'\n\n\tdef __str__(self):\n\t\treturn self.descricao\n\n"
},
{
"alpha_fraction": 0.5410764813423157,
"alphanum_fraction": 0.5949008464813232,
"avg_line_length": 19.764705657958984,
"blob_id": "fd9fab2ab72972cbe3e4bfac8f091f6458623b34",
"content_id": "ce0e2721439bb85232bfddf133bca64fd1d8f5e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 353,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 17,
"path": "/contas/migrations/0003_auto_20200603_1205.py",
"repo_name": "leovd100/Django-controle-de-gastos",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.0.6 on 2020-06-03 15:05\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('contas', '0002_transacao'),\n ]\n\n operations = [\n migrations.AlterModelOptions(\n name='transacao',\n options={'verbose_name_plural': 'Transacoes'},\n ),\n ]\n"
},
{
"alpha_fraction": 0.7405405640602112,
"alphanum_fraction": 0.7405405640602112,
"avg_line_length": 36,
"blob_id": "c3cbc4da685c402b0b10a2b64cf42a564607874b",
"content_id": "38147dfcd12222a3208ea0bff0e2b4c8f3c8deab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 185,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 5,
"path": "/venv/Scripts/django-admin.py",
"repo_name": "leovd100/Django-controle-de-gastos",
"src_encoding": "UTF-8",
"text": "#!c:\\users\\demetrio\\desktop\\projetos python\\djagoudemy\\venv\\scripts\\python.exe\nfrom django.core import management\n\nif __name__ == \"__main__\":\n management.execute_from_command_line()\n"
}
] | 7 |
ouyangwuhai/freemind_to_excel | https://github.com/ouyangwuhai/freemind_to_excel | f27548eef64eb466744a6dec46bb18948c7bd37e | bbf67d3b8de7f6aa6865f2bd52a6023dadb149ee | 3fbd9cd269e0b89f339604952c294d346a17104b | refs/heads/master | 2020-06-30T21:07:51.844492 | 2019-06-16T12:49:59 | 2019-06-16T12:49:59 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6320127844810486,
"alphanum_fraction": 0.6415846943855286,
"avg_line_length": 35.8725471496582,
"blob_id": "9153d752867235dc623b4fbbbd9f213e2278c8ce",
"content_id": "52f6c1d0a2b0690ff779a03687488225bb8a3ae0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7522,
"license_type": "permissive",
"max_line_length": 92,
"num_lines": 204,
"path": "/lib/freemind_to_project.py",
"repo_name": "ouyangwuhai/freemind_to_excel",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n#\n# freemind_to_project.py\n#\n# Copyright LiKneu 2019\n#\n\nimport pprint # Similar to Data::Dumper\nimport lxml.etree as ET # handling XML\n\ndef main(args):\n return 0\n\ndef get_pj_path(fm_path):\n '''Takes the XPATH of the Freemind parent element and returns the XPATH\n of the Freemind element'''\n pj_path = fm_path.replace('map', 'Project')\n pj_path = pj_path.replace('node', 'Tasks', 1)\n pj_path = pj_path.replace('node', 'Task')\n return pj_path\n\ndef node_to_note(pj_root, pj_uid, pj_text):\n '''Stores the note information of Freemind into a <Note> tag of Project'''\n # Create the XPATH search pattern to find the Project UID...\n xpath_pattern = '//UID[text()=\"' + str(pj_uid) + '\"]'\n task_id = pj_root.xpath(xpath_pattern)\n # ...to get the parent node of this UID...\n pj_parent = task_id[0].getparent()\n # ...to attach the <Notes> to it.\n note = ET.SubElement(pj_parent, \"Notes\")\n note.text = pj_text\n\ndef node_to_task(pj_parent, pj_name, pj_uid, pj_level=1):\n '''Creates <Task> element with the necessary subelements and attaches it to\n a given parent element.\n \n # Example of a minimalistic task as it was produced from a Freemap\n # Project 2003 export\n <Task>\n <UID>0</UID>\n <ID>1</ID>\n <Type>1</Type>\n <IsNull>0</IsNull>\n <OutlineNumber>1</OutlineNumber>\n <OutlineLevel>0</OutlineLevel>\n <Name>Stamm</Name>\n <FixedCostAccrual>1</FixedCostAccrual>\n <RemainingDuration>PT8H0M0S</RemainingDuration>\n <Estimated>1</Estimated>\n <PercentComplete>0</PercentComplete>\n <Priority/>\n </Task>\n '''\n\n task = ET.SubElement(pj_parent, \"Task\")\n \n # Name (occurence: min = 0, max = 1)\n # The name of the task.\n name = ET.SubElement(task, \"Name\")\n # Project doesn't allow linebreaks in task titles so we remove them here\n name.text = pj_name.replace('\\n', ' ')\n \n # UID (occurences: min = max = 1)\n # The UID element is a unique identifier\n uid = ET.SubElement(task, \"UID\")\n uid.text = str(pj_uid)\n \n # ID 
(occurence: min = 0, max = 1)\n # For Resource, ID is the position identifier of the resource within the\n # list of resources. For Task, it is the position identifier of the task\n # in the list of tasks.\n pj_id = ET.SubElement(task, \"ID\")\n pj_id.text = \"1\"\n\n # Type\n # 0 = fixed units\n # 1 = fixed duration\n # 2 = fixed work\n pj_type = ET.SubElement(task, \"Type\")\n pj_type.text = \"1\"\n\n # IsNull\n # 0 = task or ressource is not null\n # 1 = task or ressource is null\n isnull = ET.SubElement(task, \"IsNull\")\n isnull.text = \"0\"\n \n # OutlineNumber\n # Indicates the exact position of a task in the outline. For example,\n # \"7.2\" indicates that a task is the second subtask under the seventh\n # top-level summary task.\n outlinenumber = ET.SubElement(task, \"OutlineNumber\")\n outlinenumber.text = \"1\"\n\n # OutlineLevel\n # The number that indicates the level of a task in the project outline\n # hierarchy.\n outlinelevel = ET.SubElement(task, \"OutlineLevel\")\n outlinelevel.text = str(pj_level)\n\n # FixedCostAccrual\n # Indicates how fixed costs are to be charged, or accrued, to the cost\n # of a task. (no info on Microsofts webpage what values are allowed)\n fixedcostaccrual = ET.SubElement(task, \"FiexedCostAccrual\")\n fixedcostaccrual.text = \"1\"\n \n # RemainingDuration (occurence: min = 0, max = 1)\n # The amount of time required to complete the unfinished portion of a\n # task. Remaining duration can be calculated in two ways, either based on\n # percent complete or on actual duration.\n # TODO: check if RemainingDuration is necessary\n remainingduration = ET.SubElement(task, \"RemainingDuration\")\n remainingduration.text = \"PT8H0M0S\" # 8 Hours, 0 Minutes, 0 seconds \n \n # Estimated (occurence: min = 0, max = 1)\n # Indicates whether the task's duration is flagged as an estimate.\n # 0 = not estimated (i.e. 
precise)\n # 1 = estimated\n estimated = ET.SubElement(task, \"Estimated\")\n estimated.text = \"1\"\n \n # PercentComplete (occurence: min = 0, max = 1)\n # The percentage of the task duration completed.\n percentcomplete = ET.SubElement(task, \"PercentComplete\")\n percentcomplete.text = \"0\"\n \n # Priority (occurence: min = 0, max = 1)\n # Indicates the level of importance assigned to a task, with 500 being\n # standard priority; the higher the number, the higher the priority.\n priority = ET.SubElement(task, \"Priority\")\n \ndef to_project(input_file, output_file):\n '''Converts the Freeemind XML file into an MS Project XML file.'''\n\n print('Converting to Project')\n print('Input file : ' + input_file)\n print('Output file: ' + output_file)\n \n # Prepare the Fremmind XML tree\n # Read freemind XML file into variable\n fm_tree = ET.parse(input_file)\n # Get the root element of the Freemind XML tree\n fm_root = fm_tree.getroot()\n \n # Prepare the root element of the new Project XML file\n attrib = {'xmlns':'http://schemas.microsoft.com/project'}\n pj_root = ET.Element('Project', attrib)\n \n # Based on the Project root element we define the Project tree\n pj_tree = ET.ElementTree(pj_root)\n \n # Add the Project <Title> element\n pj_title = ET.SubElement(pj_root, 'Title')\n pj_title.text = fm_root.xpath('/map/node/@TEXT')[0]\n \n # Add the Project <Tasks> element\n pj_tasks = ET.SubElement(pj_root, 'Tasks')\n pj_path = pj_tree.getpath(pj_tasks)\n \n # Dict holding mapping table of Freemind and Project UIDs\n uid_mapping = {}\n \n # UID in Project starts with 0\n pj_uid = 0\n \n for fm_node in fm_root.iter('node'):\n # Determine the parent of the present Freemind element\n fm_parent = fm_node.getparent()\n # Determine the XPATH of the Freemind parent element\n fm_parent_path = fm_tree.getpath(fm_parent)\n # Determine the XPATH of the Project parent element from the XPATH of\n # the Freemind parent element\n pj_parent_path = get_pj_path(fm_parent_path)\n 
print(\"pj parent path:\", pj_parent_path)\n # Determine the Project parent element based on its XPATH\n pj_parent = pj_tree.xpath(pj_parent_path)[0]\n # Get the Project text from the Freemind node TEXT attribute\n pj_name = fm_node.get(\"TEXT\")\n # Get the Freemind ID from its attribute\n fm_id = fm_node.get(\"ID\")\n # Add Freemind ID and Project UID to mapping table (Dictionary)\n uid_mapping[fm_id] = pj_uid\n # calculate level with help of XPATH\n # Count number of dashed as indicator for the depth of the structure\n # -1 is for the 1st that is not needed here\n pj_level = pj_tree.getpath(pj_parent).count('/')-1\n node_to_task(pj_parent=pj_parent, pj_name=pj_name, pj_uid=pj_uid, pj_level=pj_level)\n \n # Check if node has an attached note <richcontent>\n fm_note = fm_node.xpath('normalize-space(./richcontent)')\n # If yes, remove all html tags and store the remaining text in a\n # Project <Note> tag\n if fm_note:\n node_to_note(pj_root=pj_root, pj_uid=pj_uid, pj_text=fm_note)\n \n pj_uid += 1\n\n # Write the Project XML tree to disc\n pj_tree.write(output_file, pretty_print=True, xml_declaration=True, encoding=\"utf-8\")\n\n\nif __name__ == '__main__':\n import sys\n sys.exit(main(sys.argv))\n"
},
{
"alpha_fraction": 0.6153140664100647,
"alphanum_fraction": 0.6221916675567627,
"avg_line_length": 32.5538444519043,
"blob_id": "16c59c410bf8b36a3806f84203dfdf76572ff089",
"content_id": "5f63fcfbe0a04993e65cd866a8f870b0be23448b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2181,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 65,
"path": "/lib/freemind_to_word.py",
"repo_name": "ouyangwuhai/freemind_to_excel",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n#\n# freemind_to_word.py\n#\n# Copyright LiKneu 2019\n#\n\nimport lxml.etree as ET # handling XML\nimport docx # handling of MS Word documents \n\ndef main(args):\n return 0\n\ndef to_word(input_file, output_file):\n '''Converts the Freeemind XML file into an MS Mord DOCX file.'''\n\n print('Converting to Word')\n print('Input file : ' + input_file)\n print('Output file: ' + output_file)\n \n # Prepare the Fremmind XML tree\n # Read freemind XML file into variable\n fm_tree = ET.parse(input_file)\n # Get the root element of the Freemind XML tree\n fm_root = fm_tree.getroot()\n \n doc = docx.Document()\n \n # Get the title of the freemind root node and use it as title of the\n # cover page\n title_cover = fm_root.xpath('/map/node/@TEXT')[0]\n main_title = doc.add_heading(title_cover, 0)\n \n # Add a paragraph below the title heading. I found no other way to directly\n # add a page break after the heading.\n para = doc.add_paragraph(' ')\n doc.paragraphs[0].runs[0].add_break(docx.enum.text.WD_BREAK.PAGE)\n \n # Walk through all <node> tags of the freemind file\n for fm_node in fm_root.iter('node'): \n # Get the content of the \"TEXT\" attribute of the node\n wd_title = fm_node.get(\"TEXT\")\n # Calculate how deep we have gone into the tree structure to choose a\n # fitting heading style\n wd_title_level = fm_tree.getpath(fm_node).count('/')-1\n # The 1st 4 levels get different heading styles..\n if wd_title_level <= 4:\n doc.add_heading(wd_title, wd_title_level)\n else:\n #..all the other levels stick with one style\n doc.add_paragraph(wd_title, 'Heading5')\n \n # Check if node has an attached note <richcontent>\n fm_note = fm_node.xpath('normalize-space(./richcontent)')\n # If yes, remove all html tags and store the remaining text in a\n # Project <Note> tag\n if fm_note:\n doc.add_paragraph(fm_note, 'Normal')\n\n # Write the Word DOCX file to disc\n doc.save(output_file)\n\nif __name__ == '__main__':\n import sys\n 
sys.exit(main(sys.argv))\n"
},
{
"alpha_fraction": 0.7548682689666748,
"alphanum_fraction": 0.7651775479316711,
"avg_line_length": 28.593219757080078,
"blob_id": "97fe625a5233046e498ad98a7ba1338e38a4e92f",
"content_id": "ac28b7c62314912985f24820431ab7bc11f282bf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1746,
"license_type": "permissive",
"max_line_length": 167,
"num_lines": 59,
"path": "/README.md",
"repo_name": "ouyangwuhai/freemind_to_excel",
"src_encoding": "UTF-8",
"text": "# Freemind to Microsoft Excel converter\n\nConvert Freemind files wit extension .mm (XML file format) to Excel files with extension .xlsx.\n\nEven though that Freemind already has an option to copy/paste text information from a mindmap to Excel, the result looses too much structural information for my needs.\n\nHere is an example mindmap:\n\n<img src=\"docs/freemind_map.png\" width=250>\n\nThis is the result by copy/paste into Excel:\n\n<img src=\"docs/Excel_copy_paste.png\" width=200>\n\nThis is the result using freemind_to_excel:\n\n<img src=\"docs/Excel_freemind_to_excel.png\" width=260>\n\nPresently only the nodes text is transferred into the Excel file. All other elements like notes, icons, etc. are skipped.\n\n## Usage\n\n main.py --excel input_file.mm output_file.xlsx\n\nPresently tested on Win7 only.\n\n# Freemind to Microsoft Project converter\n\nConvert Freemind files wit extension .mm (XML file format) to Project files with extension .XML.\n\nContrary to the Excel conversion the resulting XML file has to be imported into Project.\n\nThis is the result using main.py with the --project option:\n\n<img src=\"docs/Project_freemind_to_project.png\" width=260>\n\nThe conversion considers nodes, richcontent i.e. notes (text only) and hirarchy.\n\n## Usage\n\n main.py --project input_file.mm output_file.xml\n\nPresently tested on Win7 only.\n\n# Freemind to Microsoft Word converter\n\nConvert Freemind files with extension .mm (XML file format) to Word files with extension .DOCX.\n\nThis is the result using main.py with the --word option:\n\n<img src=\"docs/Word_freemind_to_word.png\" width=260>\n\nThe conversion considers nodes, richcontent i.e. notes (text only) and hirarchy.\n\n## Usage\n\n main.py --word input_file.mm output_file.docx\n\nPresently tested on Win7 only.\n"
},
{
"alpha_fraction": 0.6261682510375977,
"alphanum_fraction": 0.6339563727378845,
"avg_line_length": 31.100000381469727,
"blob_id": "a0ea817c306c50038ac82e4e0864551c51fb5d61",
"content_id": "a55cfd2705ee0ea95cb1ff785314ef7ed4001929",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1284,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 40,
"path": "/main.py",
"repo_name": "ouyangwuhai/freemind_to_excel",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n#\n# Copyright LiKneu 2019\n#\n# Converts Freemind XML files into other file formats like:\n# * Excel\n#\nimport sys # command line options\n\nfrom lib.freemind_to_excel import to_excel\nfrom lib.freemind_to_project import to_project\nfrom lib.freemind_to_word import to_word\n\ndef main():\n script = sys.argv[0] # filename of this script\n if len(sys.argv)==1: # no arguments so print help message\n print('''Usage: main.py action filename\n action must be one of --excel --project --word''')\n return\n \n action = sys.argv[1]\n # check if user input of action is an allowed/defined one\n assert action in ['--excel', '--project', '--word'], \\\n 'Action is not one of --excel --project --word: ' + action\n \n input_file = sys.argv[2] # filename of the to be converted mindmap\n output_file = sys.argv[3] # filename of the export file\n process(action, input_file, output_file)\n\ndef process(action, input_file, output_file):\n '''Processes user input.'''\n if action == '--excel':\n to_excel(input_file, output_file)\n elif action == '--project':\n to_project(input_file, output_file)\n elif action == '--word':\n to_word(input_file, output_file)\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6168224215507507,
"alphanum_fraction": 0.6220145225524902,
"avg_line_length": 37.01315689086914,
"blob_id": "93671d0c0bb18be0e9123a52e21a0685b14f41c7",
"content_id": "ad30eb95bc7d23ff81ef5b1b5e337b5798d3e61d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2889,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 76,
"path": "/lib/freemind_to_excel.py",
"repo_name": "ouyangwuhai/freemind_to_excel",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\ndef to_excel(input_file, output_file):\n '''Converts the Freeemind XML file into an Excel sheet.'''\n\n import lxml.etree as ET # handling XML\n import openpyxl # handling Excel files\n from tqdm import tqdm # progress bar\n\n print('Converting to Excel')\n print('Input file : ' + input_file)\n print('Output file: ' + output_file)\n \n wb = openpyxl.Workbook() # Create Excel workbook object\n \n # Get the active sheet (Excel creates sheet 'Sheet') automatically with the\n # creation of the workbook\n sheet = wb.active\n \n tree = ET.parse(input_file) # Read freemind XML file into variable\n\n root = tree.getroot() # Get the root element of the XML file\n\n # Get text of the root node with a XPATH ...\n sheet_title = root.xpath('/map/node/@TEXT')\n sheet.title = sheet_title[0] # ..and name the Excel sheet like it\n \n base = [] # Create list to hold the cells for export to EXCEL\n headings = ['Level 0']\n max_levels = 0\n\n for node in tqdm(root.iter('node')):\n path = tree.getpath(node) # Get the xpath of the node\n \n # Count number of dashed as indicator for the depth of the structure\n # -2 is for the 1st 2 levels that are not needed here\n nr_dashes = path.count('/') - 2\n \n if nr_dashes > max_levels: # To generate headings for each column we\n max_levels = nr_dashes # need to know the max depth of the tree\n # Add a heading to the list for each level of the tree\n headings.append('Level ' + str(max_levels))\n \n # Count numbers of elements already in the list\n nr_base = len(base)\n \n # If we have less dashes than elements we now that we jumped back a level\n if nr_dashes < nr_base:\n # And so we reduce the list to the same number like levels\n base = base[:nr_dashes]\n\n # Append the text of the element to the list\n base.append(node.get('TEXT'))\n \n # If there are no children of type 'node' below the present node add\n # data to Excel sheet\n if not node.xpath('.//node'):\n sheet.append(base)\n\n # Insert an empty row on 
top of all rows to make room for header cells\n sheet.insert_rows(1)\n \n col = 1 # First row in Excel is 1 (not 0)\n # Since it seems not possible to inser a whole row with data at the top of\n # a shhet we have to iterate through the columns and write cells separately\n for heading in headings:\n sheet.cell(row=1, column=col).value = heading\n col+=1\n \n # Add autofilter\n # We use sheet.dimensions to let openpyxl determin the number of rows & columns\n sheet.auto_filter.ref = sheet.dimensions\n \n sheet.freeze_panes = 'A2'\n \n wb.save(output_file) # Save the workbook we have created above to disc\n"
}
] | 5 |
not-tanh/rasa_code_templates | https://github.com/not-tanh/rasa_code_templates | 8891818b098a0eee8b7a53df212eebdff873a993 | 5f62358903ac483a4aeebff8592155ed7edd646d | aa3deae5a9250f5b0a14513afd6eabc4517af7b0 | refs/heads/master | 2023-02-13T20:37:04.686437 | 2021-01-05T14:09:08 | 2021-01-05T14:09:08 | 325,605,098 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5971385836601257,
"alphanum_fraction": 0.6212349534034729,
"avg_line_length": 34.89189147949219,
"blob_id": "c861bbbf4a5a98a65df4d94e5e7bee2916ab8431",
"content_id": "d8f1588b15e4b403a7aba47a02017a7b6ca5eced",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1328,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 37,
"path": "/action_get_news.py",
"repo_name": "not-tanh/rasa_code_templates",
"src_encoding": "UTF-8",
"text": "from typing import Any, Text, Dict\n\nfrom rasa_sdk import Action, Tracker\nfrom rasa_sdk.executor import CollectingDispatcher\nimport requests\nfrom bs4 import BeautifulSoup\n\nnews_url = 'https://vnexpress.net/'\nheaders = {\n 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '\n 'AppleWebKit/537.36 (KHTML, like Gecko) '\n 'Chrome/64.0.3282.186 '\n 'Safari/537.36'\n}\n\n\nclass ActionGetNews(Action):\n def name(self) -> Text:\n # Implement this yourself\n pass\n\n @staticmethod\n def get_news():\n # Returns top story in vnexpress\n r = requests.get(news_url, headers=headers)\n soup = BeautifulSoup(r.text, 'lxml')\n top_story_article = soup.find('article', {'class': 'article-topstory'})\n title = top_story_article.find('h3', {'class': 'title-news'}).text.strip()\n desc = top_story_article.find('p', {'class': 'description'}).text\n location_stamp = top_story_article.find('span', {'class': 'location-stamp'}).text.strip()\n # Delete location stamp in description\n desc = desc.replace(location_stamp, '', 1).strip()\n return '%s\\n%s' % (title, desc)\n\n def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]):\n # Implement this yourself\n pass\n"
},
{
"alpha_fraction": 0.581379771232605,
"alphanum_fraction": 0.6014735698699951,
"avg_line_length": 36.32500076293945,
"blob_id": "d0aa93b7b7b4fdbc5afb963675d8c0784bf72c2e",
"content_id": "9bf411e51caaef91d805715a96bd34061ab468d8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1519,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 40,
"path": "/action_get_weather.py",
"repo_name": "not-tanh/rasa_code_templates",
"src_encoding": "UTF-8",
"text": "from typing import Any, Text, Dict\n\nfrom rasa_sdk import Action, Tracker\nfrom rasa_sdk.executor import CollectingDispatcher\nimport requests\nfrom bs4 import BeautifulSoup\n\n\ngoogle_url = 'https://www.google.com.vn/search'\nheaders = {\n 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '\n 'AppleWebKit/537.36 (KHTML, like Gecko) '\n 'Chrome/64.0.3282.186 '\n 'Safari/537.36'\n}\n\n\nclass ActionGetWeatherInfo(Action):\n def name(self) -> Text:\n return 'action_get_weather_info'\n\n @staticmethod\n def get_weather():\n # Returns weather condition and degree\n r = requests.get(google_url,\n params={'q': 'thời tiết ở Hà Nội', 'cr': 'countryVN', 'lr': 'lang_vi', 'hl': 'vi'},\n headers=headers)\n soup = BeautifulSoup(r.text, 'lxml')\n weather_box = soup.find('div', {'id': 'wob_dcp'})\n if weather_box:\n degree = soup.find('span', {'id': 'wob_tm'})\n condition = weather_box.text\n return condition.lower(), degree.text.strip()\n return '', ''\n\n def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]):\n condition, degree = self.get_weather()\n dispatcher.utter_message(template='utter_weather', condition=condition, degree=degree)\n # This also works\n # dispatcher.utter_message(text=\"Hôm nay {condition}, nhiệt độ hiện tại là {degree} độ bạn nhé!\")\n"
},
{
"alpha_fraction": 0.5517420768737793,
"alphanum_fraction": 0.5553821921348572,
"avg_line_length": 36.70588302612305,
"blob_id": "615fb6332c366b7ab9564385ac8a3ae27207e529",
"content_id": "b4f906a1fedb57359577ce65354b290e16138f9e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1923,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 51,
"path": "/ner.py",
"repo_name": "not-tanh/rasa_code_templates",
"src_encoding": "UTF-8",
"text": "from rasa.nlu.components import Component\nimport underthesea\n\n\nclass NamedEntityRecognition(Component):\n \"\"\"A pre-trained NER component\"\"\"\n name = \"ner\"\n provides = [\"entities\"]\n requires = []\n defaults = {}\n language_list = [\"vi\"]\n\n def __init__(self, component_config=None):\n super(NamedEntityRecognition, self).__init__(component_config)\n\n def train(self, training_data, cfg, **kwargs):\n \"\"\"Not needed, because the the model is pretrained\"\"\"\n pass\n\n @staticmethod\n def convert_to_rasa(tokens):\n \"\"\"Convert model output into the Rasa NLU compatible output format.\"\"\"\n entities = []\n entity, value = '', ''\n for token in tokens:\n if token[3] != 'O':\n bi, ner = token[3].split('-')\n if bi == 'B':\n if entity:\n entities.append({\"value\": value, \"confidence\": 1, \"entity\": entity, \"extractor\": \"ner\"})\n entity, value = ner, token[0]\n else:\n value = value + ' ' + token[0]\n elif entity:\n entities.append({\"value\": value, \"confidence\": 1, \"entity\": entity, \"extractor\": \"ner\"})\n entity, value = '', ''\n if entity:\n entities.append({\"value\": value, \"confidence\": 1, \"entity\": entity, \"extractor\": \"ner\"})\n return entities\n\n def process(self, message, **kwargs):\n \"\"\"Retrieve the text message, pass it to the classifier\n and append the prediction results to the message class.\"\"\"\n if 'text' in message.data:\n tokens = underthesea.ner(message.data['text'])\n entities = self.convert_to_rasa(tokens)\n message.set(\"entities\", entities, add_to_output=True)\n\n def persist(self, file_name, dir_name, **kwargs):\n \"\"\"Pass because a pre-trained model is already persisted\"\"\"\n pass\n"
},
{
"alpha_fraction": 0.6205533742904663,
"alphanum_fraction": 0.6561264991760254,
"avg_line_length": 33.89655303955078,
"blob_id": "d80eea77d35f21d4924f275dc5bad99b477aa2d0",
"content_id": "8f66dbe5db33fb853d1fc00cabcc316ab256d492",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1012,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 29,
"path": "/validation_actions.py",
"repo_name": "not-tanh/rasa_code_templates",
"src_encoding": "UTF-8",
"text": "from typing import Text, List, Any, Dict\n\nfrom rasa_sdk import Tracker, FormValidationAction\nfrom rasa_sdk.executor import CollectingDispatcher\nfrom rasa_sdk.types import DomainDict\n\n\nclass ValidateInquiryForm(FormValidationAction):\n def name(self) -> Text:\n return \"validate_inquiry_form\"\n\n @staticmethod\n def account_number_db() -> List[Text]:\n return [\"012345678912\", \"012345678913\", \"012345678914\"]\n\n def validate_account_number(\n self,\n slot_value: Any,\n dispatcher: CollectingDispatcher,\n tracker: Tracker,\n domain: DomainDict,\n ) -> Dict[Text, Any]:\n \"\"\"Validate cuisine value.\"\"\"\n if slot_value.lower() in self.account_number_db():\n # validation succeeded, set the value of the \"cuisine\" slot to value\n return {\"account_number\": slot_value}\n else:\n # validation failed, set this slot to None so that the user will be asked for the slot again\n return {\"account_number\": None}\n"
},
{
"alpha_fraction": 0.56802898645401,
"alphanum_fraction": 0.583548903465271,
"avg_line_length": 39.29166793823242,
"blob_id": "658b1664e218a8c0985164ee6c22b3f5f0e35846",
"content_id": "df6ba57b16db815c5928e830f9c6940ccce50522",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1949,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 48,
"path": "/action_get_weather_v3.py",
"repo_name": "not-tanh/rasa_code_templates",
"src_encoding": "UTF-8",
"text": "from typing import Any, Text, Dict\n\nfrom rasa_sdk import Action, Tracker\nfrom rasa_sdk.executor import CollectingDispatcher\nimport requests\nfrom bs4 import BeautifulSoup\n\ngoogle_url = 'https://www.google.com.vn/search'\nheaders = {\n 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '\n 'AppleWebKit/537.36 (KHTML, like Gecko) '\n 'Chrome/64.0.3282.186 '\n 'Safari/537.36'\n}\n\n\nclass ActionGetWeatherInfo(Action):\n def name(self) -> Text:\n return 'action_get_weather_info'\n\n @staticmethod\n def get_weather_in_location(locations):\n # Returns list of weather conditions, degrees and location\n results = []\n if type(locations) is str:\n locations = [locations]\n locations = set(locations)\n for location in locations:\n r = requests.get(google_url,\n params={'q': 'thời tiết ở %s' % location, 'cr': 'countryVN', 'lr': 'lang_vi', 'hl': 'vi'},\n headers=headers)\n soup = BeautifulSoup(r.text, 'lxml')\n weather_box = soup.find('div', {'id': 'wob_dcp'})\n if weather_box:\n degree = soup.find('span', {'id': 'wob_tm'})\n condition = weather_box.text\n results.append((condition.lower(), degree.text.strip(), location))\n return results\n\n def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]):\n locations = tracker.get_slot('location')\n results = self.get_weather_in_location(locations)\n if results:\n for condition, degree, location in results:\n dispatcher.utter_message(template='utter_weather',\n condition=condition, degree=degree, location=location)\n else:\n dispatcher.utter_message(text='Xin lỗi bạn, mình không tìm thấy thông tin')"
}
] | 5 |
voonshunzhi/world | https://github.com/voonshunzhi/world | 71532556781991cf8192d915b1eaf23ac8e16b66 | b187c2019c16de28ee47d1044e5848f1f5111d62 | 843498a808cd26c4b1b633003961b83601196b10 | refs/heads/master | 2020-05-09T20:44:42.629192 | 2019-04-15T05:39:57 | 2019-04-15T05:39:57 | 181,419,062 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6325940489768982,
"alphanum_fraction": 0.6730954647064209,
"avg_line_length": 27.80555534362793,
"blob_id": "f8d4aa7e9266ab110e134142e03323ab61138cbb",
"content_id": "57221a9a0f7fab28a679ffbc87af2a6cc18d0cee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1037,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 36,
"path": "/migrations/versions/398de4f723c4_.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "\"\"\"empty message\n\nRevision ID: 398de4f723c4\nRevises: b6a4b6f45fc8\nCreate Date: 2019-04-12 09:37:07.818252\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '398de4f723c4'\ndown_revision = 'b6a4b6f45fc8'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.create_table('language',\n sa.Column('id', sa.Integer(), nullable=False),\n sa.Column('country_code', sa.String(length=128), nullable=True),\n sa.Column('language', sa.String(length=128), nullable=True),\n sa.Column('percentage_of_use', sa.Float(), nullable=True),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_foreign_key(None, 'CountryLanguage', 'language', ['language_id'], ['id'])\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_constraint(None, 'CountryLanguage', type_='foreignkey')\n op.drop_table('language')\n # ### end Alembic commands ###\n"
},
{
"alpha_fraction": 0.6370875835418701,
"alphanum_fraction": 0.6734926104545593,
"avg_line_length": 28.299999237060547,
"blob_id": "1bb96f9a90c514f7aec439e5642b1a0cb11933ca",
"content_id": "dc001e46d1b51f237488c6c8c219f4579b110539",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 879,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 30,
"path": "/migrations/versions/7cdfe2d4f328_.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "\"\"\"empty message\n\nRevision ID: 7cdfe2d4f328\nRevises: 5409c24fd08e\nCreate Date: 2019-04-15 13:06:21.520423\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '7cdfe2d4f328'\ndown_revision = '5409c24fd08e'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_constraint('city_country_code_fkey', 'city', type_='foreignkey')\n op.create_foreign_key(None, 'city', 'country', ['country_code'], ['country_code'], ondelete='CASCADE')\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_constraint(None, 'city', type_='foreignkey')\n op.create_foreign_key('city_country_code_fkey', 'city', 'country', ['country_code'], ['country_code'])\n # ### end Alembic commands ###\n"
},
{
"alpha_fraction": 0.5055900812149048,
"alphanum_fraction": 0.5590062141418457,
"avg_line_length": 35.54545593261719,
"blob_id": "7277c998ccda233fb3cbd89afe839dace5edcfd0",
"content_id": "d59008ab1d8b4448cfd05c4f76fa716a61332f4f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 805,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 22,
"path": "/world/models/seed/seedCountry.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "import csv\nfrom world.models.models import Country;\nfrom world import db;\n\nf = open('../country.csv')\ncsv_f = csv.reader(f)\n\n\nfor _ in range(1):\n next(csv_f)\n\nfor row in csv_f:\n print(row)\n row[4] = None if row[4] == 'NULL' or row[4] == '' else int(row[4])\n row[5] = None if row[5] == 'NULL' or row[5] == '' else int(row[5])\n row[6] = None if row[6] == 'NULL' or row[6] == '' else int(row[6])\n row[7] = None if row[7] == 'NULL' or row[7] == '' else float(row[7])\n row[8] = None if row[8] == 'NULL' or row[8] == '' else float(row[8])\n row[9] = None if row[9] == 'NULL' or row[9] == '' else int(row[9])\n country = Country(row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13])\n db.session.add(country);\n db.session.commit();\n\n"
},
{
"alpha_fraction": 0.6462736129760742,
"alphanum_fraction": 0.6674082279205322,
"avg_line_length": 38.661766052246094,
"blob_id": "702d8a5bdaa1898884f8f4c9320c94378765b976",
"content_id": "a1b50a6a1b1564f69609c2fda7f6c6f098ea6751",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2697,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 68,
"path": "/migrations/versions/d4c7203bf19c_.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "\"\"\"empty message\n\nRevision ID: d4c7203bf19c\nRevises: \nCreate Date: 2019-04-10 19:34:28.957803\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'd4c7203bf19c'\ndown_revision = None\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.create_table('country',\n sa.Column('id', sa.Integer(), nullable=True),\n sa.Column('country_code', sa.String(length=128), nullable=False),\n sa.Column('country_name', sa.String(length=128), nullable=True),\n sa.Column('continent', sa.String(length=128), nullable=True),\n sa.Column('region', sa.String(length=128), nullable=True),\n sa.Column('area', sa.Integer(), nullable=True),\n sa.Column('year_of_independence', sa.Integer(), nullable=True),\n sa.Column('population', sa.Float(), nullable=True),\n sa.Column('life_expectancy', sa.Float(), nullable=True),\n sa.Column('gnp', sa.Float(), nullable=True),\n sa.Column('gnpid', sa.Float(), nullable=True),\n sa.Column('alternative_name', sa.Text(), nullable=True),\n sa.Column('ruling_system', sa.Text(), nullable=True),\n sa.Column('founder', sa.Text(), nullable=True),\n sa.Column('iso_code', sa.String(length=128), nullable=True),\n sa.PrimaryKeyConstraint('country_code'),\n sa.UniqueConstraint('country_code')\n )\n op.create_table('language',\n sa.Column('id', sa.Integer(), nullable=False),\n sa.Column('country_code', sa.String(length=128), nullable=True),\n sa.Column('language', sa.String(length=128), nullable=True),\n sa.Column('percentage_of_use', sa.Float(), nullable=True),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_table('city',\n sa.Column('id', sa.Integer(), nullable=False),\n sa.Column('city_name', sa.String(length=128), nullable=True),\n sa.Column('province', sa.String(length=128), nullable=True),\n sa.Column('population', sa.Float(), nullable=True),\n sa.Column('country_code', sa.String(length=128), nullable=False),\n 
sa.ForeignKeyConstraint(['country_code'], ['country.country_code'], ),\n sa.PrimaryKeyConstraint('id')\n )\n op.create_foreign_key(None, 'CountryLanguage', 'country', ['country_code'], ['country_code'])\n op.create_foreign_key(None, 'CountryLanguage', 'language', ['language_id'], ['id'])\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_constraint(None, 'CountryLanguage', type_='foreignkey')\n op.drop_constraint(None, 'CountryLanguage', type_='foreignkey')\n op.drop_table('city')\n op.drop_table('language')\n op.drop_table('country')\n # ### end Alembic commands ###\n"
},
{
"alpha_fraction": 0.6490897536277771,
"alphanum_fraction": 0.6594476103782654,
"avg_line_length": 36.880950927734375,
"blob_id": "dca3c84cdd3f284da6290f1431929aa623a50069",
"content_id": "485300c5a81d49ec4ecf93d0088ad46dfd68e2a2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3186,
"license_type": "no_license",
"max_line_length": 177,
"num_lines": 84,
"path": "/world/models/models.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "from world import db;\n\nCountryLanguage = db.Table('CountryLanguage',\n db.Column('language_id', db.Integer, db.ForeignKey('language.id'), primary_key=True),\n db.Column('country_code', db.String(128), db.ForeignKey('country.country_code'), primary_key=True),\n)\n\nclass Country(db.Model):\n\n __tablename__ = \"country\"\n\n id = db.Column(db.Integer);\n cities = db.relationship('City', backref='country',lazy=True,passive_deletes=True)\n country_code = db.Column(db.String(128),unique=True,primary_key=True)\n country_name = db.Column(db.String(128));\n continent = db.Column(db.String(128));\n region = db.Column(db.String(128));\n area = db.Column(db.Integer);\n year_of_independence = db.Column(db.Integer,nullable=True);\n population = db.Column(db.Float);\n life_expectancy = db.Column(db.Float,nullable=True);\n gnp = db.Column(db.Float);\n gnpid = db.Column(db.Float,nullable=True);\n alternative_name = db.Column(db.Text);\n ruling_system = db.Column(db.Text);\n founder = db.Column(db.Text);\n iso_code = db.Column(db.String(128))\n\n\n def __init__(self,country_code,country_name,continent,region,area,year_of_independence,population,life_expectancy,gnp,gnpid,alternative_name,ruling_system,founder,iso_code):\n self.country_code = country_code;\n self.country_name = country_name\n self.continent = continent;\n self.region = region;\n self.area = area;\n self.year_of_independence = year_of_independence;\n self.population = population;\n self.life_expectancy = life_expectancy\n self.gnp = gnp;\n self.gnpid = gnpid;\n self.alternative_name = alternative_name;\n self.ruling_system = ruling_system;\n self.founder = founder;\n self.iso_code = iso_code;\n\n def __repr__(self):\n return \"This is the language of \" + self.country_name;\n\nclass City(db.Model):\n\n __tablename__ = \"city\"\n id = db.Column(db.Integer, primary_key=True);\n city_name = db.Column(db.String(128));\n province= db.Column(db.String(128));\n population = db.Column(db.Float);\n country_code = 
db.Column(db.String(128), db.ForeignKey('country.country_code',ondelete='CASCADE'),nullable=False)\n\n def __init__(self,city_name,country_code,province,population):\n self.city_name = city_name;\n self.population = population;\n self.country_code = country_code;\n self.province = province;\n\n def __repr__(self):\n return \"This is the language of \" + self.city_name;\n\nclass Language(db.Model):\n\n __tablename__ = \"language\"\n\n id = db.Column(db.Integer, primary_key=True);\n country_code = db.Column(db.String(128))\n language = db.Column(db.String(128));\n official_language = db.Column(db.Boolean,default=False, nullable=False),\n percentage_of_use = db.Column(db.Float)\n\n def __init__(self,country_code,language,official_language,percentage_of_use):\n self.language = language;\n self.country_code = country_code;\n self.official_language = official_language;\n self.percentage_of_use = percentage_of_use;\n\n def __repr__(self):\n return \"This is the language of \" + self.language ;\n\n\n\n\n"
},
{
"alpha_fraction": 0.6299694180488586,
"alphanum_fraction": 0.6788991093635559,
"avg_line_length": 22.35714340209961,
"blob_id": "47474b22058246549c6edc983e283ea92bdf049d",
"content_id": "8be55e89c275b00631a1ce9da65b48ee5c7a5010",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 654,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 28,
"path": "/migrations/versions/b6a4b6f45fc8_.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "\"\"\"empty message\n\nRevision ID: b6a4b6f45fc8\nRevises: d4c7203bf19c\nCreate Date: 2019-04-12 09:34:23.479972\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'b6a4b6f45fc8'\ndown_revision = 'd4c7203bf19c'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.create_unique_constraint(None, 'country', ['country_code'])\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_constraint(None, 'country', type_='unique')\n # ### end Alembic commands ###\n"
},
{
"alpha_fraction": 0.6686046719551086,
"alphanum_fraction": 0.6719961166381836,
"avg_line_length": 41.96875,
"blob_id": "abde03c1284e10f283202184754727eca76a006b",
"content_id": "ed47dd48147e54f7b85188e6bb064fd85f59e190",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4128,
"license_type": "no_license",
"max_line_length": 191,
"num_lines": 96,
"path": "/app.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "from world import app;\nfrom world.models.models import Language,CountryLanguage,City,Country;\nfrom flask import render_template,request,redirect,url_for,abort;\nimport csv\nfrom world import db;\n\[email protected](\"/\")\ndef index():\n return render_template(\"index.html\")\n\[email protected](\"/search\")\ndef search():\n search = request.args.get('search');\n languages = Language.query.filter(Language.language.like('%' + search.title() +'%')).distinct(Language.language).group_by(Language.id,Language.language).all()\n countries = Country.query.filter(Country.country_name.like('%' + search.title() + '%')).distinct(Country.country_name).group_by(Country.id,Country.country_name,Country.country_code).all()\n cites = City.query.filter(City.city_name.like('%' + search.title() + '%')).distinct(City.city_name).group_by(City.id,City.city_name).all()\n totalLength = len(cites + countries + languages)\n results = \"result\" if len(languages) == 0 or languages == 1 else \"results\"\n return render_template(\"search.html\",totalLength=totalLength,result=results,search=search,countries=countries,languages=languages,cities=cites);\n\[email protected](\"/country/<id>\")\ndef country(id):\n country = Country.query.get(id)\n return render_template(\"country.html\",country=country)\n\[email protected](\"/language/<id>\")\ndef language(id):\n language = Language.query.get(id)\n languages = Language.query.filter_by(language=language.language).distinct(Language.country_code).group_by(Language.id,Language.country_code).all()\n print(languages)\n return render_template(\"language.html\",language=language,languages=enumerate(languages))\n\[email protected](\"/city/<id>\")\ndef city(id):\n city = City.query.get(id)\n return render_template(\"city.html\",city=city)\n\[email protected](\"/update/city/<id>\",methods=['GET','POST'])\ndef updateCity(id):\n city = City.query.get(id)\n if request.method == 'GET':\n return render_template(\"update/city.html\",city=city)\n else:\n 
city.city_name = request.form['city_name']\n city.province = request.form['province']\n city.population = request.form['population']\n db.session.commit();\n return redirect(url_for('city',id=city.id),code=302)\n\[email protected](\"/update/country/<id>\",methods=['GET','POST'])\ndef updateCountry(id):\n country = Country.query.get(id)\n if request.method == 'GET':\n return render_template(\"update/country.html\",country=country)\n else:\n country.continent = request.form['continent']\n country.region = request.form['region']\n country.area = request.form['area'];\n country.year_of_independence = request.form['year_of_independence'];\n country.population = request.form['population'];\n country.life_expectancy = request.form['life_expectancy']\n country.gnp = request.form['gnp'];\n country.gnpid = request.form['gnpid'];\n country.alternative_name = request.form['alternative_name'];\n country.ruling_system = request.form['ruling_system'];\n country.founder = request.form['founder'];\n country.iso_code = request.form['iso_code'];\n db.session.commit();\n return redirect(url_for('country',id=country.country_code),code=302)\n\[email protected](\"/update/language/<id>\",methods=['GET','POST'])\ndef updateLanguage(id):\n language = Language.query.get(id);\n if request.method == 'GET':\n return render_template('update/language.html',language=language);\n else:\n language.percentage_of_use = request.form['percentage_of_use']\n db.session.commit();\n return redirect(url_for('language',id=language.id),code=302)\n\[email protected](\"/delete/country/<id>\",methods=['POST'])\ndef deleteCountry(id):\n if request.method == 'POST':\n country = Country.query.get(id);\n languages = Language.query.filter_by(country_code=id);\n for language in languages:\n db.session.delete(language);\n db.session.commit();\n db.session.delete(country);\n db.session.commit();\n return redirect(url_for('index'));\n else:\n return abort(404)\n \nif __name__ == \"__main__\":\n app.run(debug=True); "
},
{
"alpha_fraction": 0.5560975670814514,
"alphanum_fraction": 0.5804877877235413,
"avg_line_length": 18.4761905670166,
"blob_id": "ca6e8ef20a6f5d81a7a16f0e74270c638488fb7d",
"content_id": "4c527a51e17ea8e89dc5e131e714215b79e90268",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 410,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 21,
"path": "/world/models/seed/seedLanguage.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "import csv\nfrom world.models.models import Language;\nfrom world import db;\n\nf = open('../language.csv')\ncsv_f = csv.reader(f)\n\n\nfor _ in range(1):\n next(csv_f)\n\nfor row in csv_f:\n print(row)\n if row[2] == 'T':\n row[2] = True\n else:\n row[2] = False\n row[3] = float(row[3])\n language = Language(row[0],row[1],row[2],row[3])\n db.session.add(language);\n db.session.commit();\n\n"
},
{
"alpha_fraction": 0.6013985872268677,
"alphanum_fraction": 0.618881106376648,
"avg_line_length": 16.8125,
"blob_id": "62d3c9b0571b24ad4a56e532d00defe6a50ddddb",
"content_id": "15b551afc580852d3ff8ecfabaa0e8b11fa3638f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 286,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 16,
"path": "/world/models/seed/seedCity.py",
"repo_name": "voonshunzhi/world",
"src_encoding": "UTF-8",
"text": "import csv\nfrom world.models.models import City;\nfrom world import db;\n\nf = open('../city.csv')\ncsv_f = csv.reader(f)\n\n\nfor _ in range(1):\n next(csv_f)\n\nfor row in csv_f:\n print(row)\n city = City(row[1],row[2],row[3],row[4])\n db.session.add(city);\n db.session.commit();\n\n"
}
] | 9 |
tzuchyi/class_exercise | https://github.com/tzuchyi/class_exercise | 2f50a2bac75b0833148c59a1a1ccb885a0dcaed9 | f91c2803b90f062894d4435e44adfb5f1247ae94 | 18d5549b88470eb52af5fb4015b0266dfcefdf90 | refs/heads/master | 2021-12-03T02:58:54.287050 | 2021-11-30T09:07:06 | 2021-11-30T09:07:06 | 210,496,656 | 0 | 2 | null | null | null | null | null | [
{
"alpha_fraction": 0.43915343284606934,
"alphanum_fraction": 0.4582010507583618,
"avg_line_length": 27.636363983154297,
"blob_id": "09ccfc5ac01949c97adc171068db03bd0b485061",
"content_id": "cf39f2d447b18674df90606adaea7519e119f9f8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 945,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 33,
"path": "/Leetcode/710#_Random Pick with Blacklist_06170101.py",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "import random\nimport bisect\n\n\nclass Solution:\n def __init__(self, N, blacklist):\n self.N = N - 1\n self.black = sorted(blacklist)\n self.range = []\n self.weight = []\n self.blacklen = len(self.black)\n if self.blacklen:\n s = 0\n for r in self.black:\n if r - s >= 1:\n self.range.append([s, r - 1])\n s = r + 1\n if s < self.N + 1:\n self.range.append([s, self.N])\n\n weight = 0\n for r in self.range:\n weight = weight + r[1] - r[0] + 1\n self.weight.append(weight)\n\n def pick(self) -> int:\n if self.blacklen:\n r = self.range[bisect.bisect_left(\n self.weight, random.randint(1, self.weight[-1]))]\n return random.randint(r[0], r[1]) if r[1] > r[0] else r[0]\n\n else:\n return random.randint(0, self.N)\n"
},
{
"alpha_fraction": 0.3434704840183258,
"alphanum_fraction": 0.3470483124256134,
"avg_line_length": 18.48214340209961,
"blob_id": "ee26341f5997ba850ed86dfc58cc5a31ec4dbf9a",
"content_id": "5452c036c6caa5ebee7bfe9a2dd14e260b086f34",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1118,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 56,
"path": "/HW5/BFS_06170101.py",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "\nfrom collections import defaultdict \n\nclass Graph:\n\n def __init__(self): \n\n self.graph = defaultdict(list) \n\n def addEdge(self,u,v): \n self.graph[u].append(v) \n \n def BFS(self, s): \n \n ans=[]\n temp=[s]\n exsist=set(\"s\")\n \n\n \n while len(temp)>0 :\n \n out=temp.pop(0)\n ans.append(out)\n \n \n for i in self.graph[out]:\n if i not in ans and i not in exsist:\n temp.append(i)\n exsist.add(i)\n\n \n return ans\n \n \n \n def DFS(self, s):\n \n ans=[]\n temp=[s]\n exsist=set(\"s\")\n \n\n \n while len(temp)>0 :\n \n out=temp.pop(-1)\n ans.append(out)\n \n \n for i in self.graph[out]:\n if i not in ans and i not in exsist:\n temp.append(i)\n exsist.add(i)\n\n \n return ans\n \n \n"
},
{
"alpha_fraction": 0.6767441630363464,
"alphanum_fraction": 0.7488372325897217,
"avg_line_length": 38,
"blob_id": "6e34765d55383ddcbc2e4b35350ca5cf3b96f22d",
"content_id": "9504b5ca6a314dad8cfd51db2d942c8ccf507ea8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 482,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 11,
"path": "/HW1/readme.md",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "\n*作業參考資料在最底部\n\n[程式碼](https://github.com/tzuchyi/class_exercise/blob/master/HW1/QuickSort_change.ipynb)\n### [更新版程式碼](https://nbviewer.jupyter.org/github/tzuchyi/class_exercise/blob/master/1018hw/QuickSort_change.ipynb)\n\n\n### 流程圖\n\n\n### 參考資料\n\n"
},
{
"alpha_fraction": 0.6822537183761597,
"alphanum_fraction": 0.7485626935958862,
"avg_line_length": 41.08064651489258,
"blob_id": "6fff1afdb34c69bba8f9e2604e4d1f96bfbef441",
"content_id": "9f2f555e63d8e901488e056521edb63465f4bd3f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3717,
"license_type": "no_license",
"max_line_length": 327,
"num_lines": 62,
"path": "/README.md",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "# 資料結構與演算法\n\n\n# 自我介紹 \n高子淇 06170101 3a \n\n 愛打傳說\n \n 喜歡躺在床上做所有事情/\n \n 超愛吃火鍋/\n \n 不喜歡做家事/\n \n 努力戒飲料但是很愛喝奶茶/\n \n# 課程導覽\n- week 1:簡介. \n- week 2:linked-list.\n >空間不需要連續,每一個節點連接下一個節點需要知道有多少節點,要從頭到尾走過\n- week 3:Stack_and_Queue. \n * stack\n >像取盤子一樣,一層一層往上疊,也只能取最上層的盤子,先進後出。ex網頁的上一頁或是word檔的復原\n * queue\n >像排隊一樣,不可以插隊,第一個進去的人,第一個出去。\n- week 4:insertion_sort\n- [week 5](https://github.com/tzuchyi/class_exercise/tree/master/HW1):Quick_Sort.\n * [程式碼](https://github.com/tzuchyi/class_exercise/blob/master/HW1/QuickSort_change.ipynb)\n- [week 6&7](https://github.com/tzuchyi/class_exercise/tree/master/HW2):heap_sort&merge_sort \n\n * [heap_sort](https://github.com/tzuchyi/class_exercise/blob/master/HW2/heap_sort_06170101.py)\n * [merge_sort](https://github.com/tzuchyi/class_exercise/blob/master/HW2/merge_sort_06170101.py)\n- week 8:Binary_tree. \n- [week 9](https://github.com/tzuchyi/class_exercise/blob/master/HW3/Binary_Search_Tree_%E6%96%B0%E5%A2%9E%E5%88%AA%E9%99%A4%E6%9F%A5%E8%A9%A2%E4%BF%AE%E6%94%B9%E5%8A%9F%E8%83%BD%E8%AA%AA%E6%98%8E.ipynb):BST. \n\n * [程式碼](https://github.com/tzuchyi/class_exercise/blob/master/HW3/binary_search_tree_06170101.py)\n- week 10:期中考. \n- week 11:Red_Black_Tree. 
\n- [week 12](https://github.com/tzuchyi/class_exercise/blob/master/HW4/hash_table%E8%A3%BD%E4%BD%9C.ipynb):Hash_Table \n\n * [程式碼](https://github.com/tzuchyi/class_exercise/blob/master/HW4/hash_table_06170101.py)\n- [week 13](https://github.com/tzuchyi/class_exercise/blob/master/HW5/BFS_DFS%E6%B5%81%E7%A8%8B%E5%9C%96.ipynb):BFS/DFS \n\n * [程式碼](https://github.com/tzuchyi/class_exercise/blob/master/HW5/BFS_06170101.pyy)\n- [week 14&15](https://github.com/tzuchyi/class_exercise/blob/master/HW6/Dijkstra_06170101.py):Kruskal&Dijkstra\n * [程式碼](HW6/Dijkstra_06170108.py)\n- week 16:分享區塊鏈作品\n- week 17:期末考\n- week 18:回家投票\n\n\n# HW\n- [HW1](https://github.com/hello02923/lai/blob/master/HW1/readme.md)\n- [HW2](https://github.com/tzuchyi/class_exercise/tree/master/HW2)\n- [HW3](https://github.com/tzuchyi/class_exercise/tree/master/HW3) \n- [HW4](https://github.com/tzuchyi/class_exercise/tree/master/HW4)\n- [HW5](https://github.com/tzuchyi/class_exercise/tree/master/HW5)\n- [HW6](https://github.com/tzuchyi/class_exercise/tree/master/HW6)\n\n# 心得\n演算法蠻酷的,可以把圖像概念以程式碼呈現。如果這學期是李佳蓉老師可能會學得更愉快吧!\n我自己一開始是蠻有興趣的啦,但有一次我只是將排序的順序寫錯寫成大排到小,你要扣我一半程式碼的成績我很不能夠理解,你說你的宗旨是想讓大家自己寫程式碼,如果你有機會可以去看看我的merge/heap sort,我只有聽課理解演算法後就將觀念實體化,我敢賭你找不到任何一個寫的和我一樣。我全部自己花了好幾天寫出來,你卻要我一半分數?對我來說你就是為了扣我分而拿我分。我現在也不是要來和你拿那個一兩分,我只是要和你說一聲抱歉,自從你要扣我那一半分數開始,我就無法對你的行事作風有好感,即使你再舌燦蓮花,對我來說你就是說一套做一套。但也謝謝你讓我上了一課,當我自己理虧,沒有決定權,我就沒有要求你改變遊戲規則的權利,讓我知道我必須更努力做到更好才可以有爭取的權利。\n"
},
{
"alpha_fraction": 0.4350311756134033,
"alphanum_fraction": 0.43711018562316895,
"avg_line_length": 27.611940383911133,
"blob_id": "c8c24e0270bb86c45b8d11dfa389ca6ad6debcab",
"content_id": "c2739044d40c0a97de4239cb0f2a6428393a2b0f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1924,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 67,
"path": "/HW6/Dijkstra_06170101.py",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "\nfrom collections import defaultdict \n\nclass Graph(): \n\n def __init__(self, vertices): \n self.V = vertices \n self.graph = [] \n self.graph_matrix = [[0 for column in range(vertices)] \n for row in range(vertices)] \n \n self.dict=defaultdict(list)\n \n def addEdge(self,u,v,w): \n self.dict[w].append([u,v])\n \n\n def Dijkstra(self, s): \n long=len(self.graph)\n nodes = [i for i in range(long)] \n visited=[] \n \n if s in nodes:\n nodes.remove(s)\n visited.append(s)\n \n else:\n return None\n \n dictionary = {str(s):0}\n distance={s:0} \n \n for i in nodes:\n distance[i]=self.graph[s][i] \n\n k=pre=s\n while nodes:\n mid_distance=float('inf')\n for v in visited:\n for d in nodes:\n new_distance = self.graph[s][v]+self.graph[v][d]\n if new_distance < mid_distance and self.graph[v][d]!=0:\n mid_distance=new_distance\n self.graph[s][d]=new_distance \n k=d\n pre=v\n distance[k]=mid_distance \n nodes.remove(k) \n visited.append(k)\n \n for i in range (self.V):\n dictionary[str(i)] = distance[i]\n return dictionary\n \n def Kruskal(self):\n \n result = {}\n val = sorted(self.dict)\n checked = [column for column in range(self.V)] \n \n for i in val:\n for u,v in self.dict[i]:\n if checked[u] == checked[v]:\n pass\n else:\n checked = [checked[u]if x==checked[v] else x for x in checked]\n result[str(u)+'-'+str(v)] = i\n return result\n \n\n"
},
{
"alpha_fraction": 0.40174129605293274,
"alphanum_fraction": 0.41542288661003113,
"avg_line_length": 31.494844436645508,
"blob_id": "956dffe443f6cfb9c1e393690d90138776ac4543",
"content_id": "fa702540b2d33fa59d4b21446a625b12ba29ac1e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3250,
"license_type": "no_license",
"max_line_length": 179,
"num_lines": 97,
"path": "/HW4/hash_table_06170101.py",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "from Crypto.Hash import MD5\nclass ListNode:\n def __init__(self, val):\n self.val = val\n self.next = None\n \n \n \nclass MyHashSet:\n def __init__(self, capacity=5):\n self.capacity = capacity\n self.data = [None] * capacity\n \n \n \n \n def add(self, key):\n \n h = MD5.new()\n h.update(key.encode(\"utf-8\"))\n keycode = int(h.hexdigest(),16)\n index = keycode % self.capacity\n for i in range(self.capacity):\n if index==i:\n head=self.data[i]\n if self.data[i]==None:\n \n self.data[i]=ListNode(keycode)\n else:\n while head.next!=None:\n head=head.next\n head.next=ListNode(keycode)\n \n \n\n\n \n \n def remove(self, key):\n h = MD5.new()\n h.update(key.encode(\"utf-8\"))\n keycode = int(h.hexdigest(),16)\n index = keycode % self.capacity\n for i in range(self.capacity):\n if index==i:\n head=self.data[i]\n \n while head!=None:\n if head.val==keycode and head.next==None:\n self.data[i]=None\n head=self.data[i]\n elif head.val==keycode and head.next!=None:\n self.data[i]=head.next\n head=self.data[i]\n else:\n while head.val!=keycode and head.next!=None:\n dad=head\n head=head.next\n if head.val==keycode:\n if head.next==None:\n dad.next=None\n head=None\n else:\n dad.next=head.next\n head=dad\n else:\n head=None\n \n \n \n \n def contains(self, key):\n \n h = MD5.new()\n h.update(key.encode(\"utf-8\"))\n keycode = int(h.hexdigest(),16)\n index = keycode % self.capacity\n for i in range(self.capacity):\n if index==i:\n head=self.data[i]\n if self.data[i]==None:\n return False\n else:\n while head.val!=keycode and head.next!=None:\n head=head.next\n if head.val==keycode:\n return True\n else:\n return False\n \n \n#參考資料\n#### 原理及概念上理解:\n#https://zh.wikipedia.org/wiki/散列函數 \n#http://alrightchiu.github.io/SecondRound/hash-tableintrojian-jie.html \n#https://blog.techbridge.cc/2017/01/21/simple-hash-table-intro/ 
\n#https://docs.google.com/presentation/d/e/2PACX-1vT1HO9Nl475k2bR0l1x8_Tr4V5Wzx0BEqp9bpmHckvj8kTeJehhYVlOJUDVPhLQm6kjGCJ_sLMSBUw5/pub?start=false&loop=false&delayms=3000&slide=id.p\n \n \n \n\n"
},
{
"alpha_fraction": 0.7228915691375732,
"alphanum_fraction": 0.7983935475349426,
"avg_line_length": 61.25,
"blob_id": "a84c478f85db52db12f1a3cde35cb125adfd6020",
"content_id": "723ba40cd1546e4d6b5df98371b7ae8a954d659a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1723,
"license_type": "no_license",
"max_line_length": 215,
"num_lines": 20,
"path": "/HW2/readme.md",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "## 學習歷程:\n> [mergesort學習歷程](https://nbviewer.jupyter.org/github/tzuchyi/class_exercise/blob/master/HW2/merge_sort_製作歷程.ipynb) \n> [heapsort學習歷程](https://nbviewer.jupyter.org/github/tzuchyi/class_exercise/blob/master/HW2/heap_sort_製作歷程.ipynb) \n\n## merge與heap的排序比較\n> merge與heap平均、最快、最慢花費時間皆相同,皆為O(nlogn) \n> 但在空間上heap所佔的空間會較merge少,因為heap是直接在原本的序列上做更改,並沒有增加新的序列。merge為O(n)、heap為O(1)。 \n> 這個部分因為只聽老師上課講解,誤以為拿掉底部的數是指會生成新的序列,故我製作的heapsort空間複雜度和mergesort相同,皆生成了新的序列。 \n\n## 參考資料: \n> merge_sort演算法了解 https://docs.google.com/presentation/d/e/2PACX-1vToxkEzc1H1RT5MI9G941KQFBC7GO_Efn95wTqXLEdr3LDBSNcQb-M46IOC-_RzZih6IBEwwy3rWQuE/pub?start=false&loop=false&delayms=3000&slide=id.g6504c48e6e_0_17 \n>> 做比較時查找merge資料 https://kopu.chat/2017/08/10/合併排序-merge-sort/ \n \n> heap_sort演算法了解 https://docs.google.com/presentation/d/e/2PACX-1vRAGwnUvg6BcXoML5u9f4gO6YKcz0vXf7bDnPho_S7mG5D0SBR78djt91RKUPMxqNfkVIcu3l5WCXPh/pub?start=false&loop=false&delayms=3000&slide=id.g6504c48e6e_0_17 \n>> 做比較時查找heap資料 http://notepad.yehyeh.net/Content/Algorithm/Sort/Heap/Heap.php \n \n>> 基本上我這兩個演算法都只有依照老師上課講解其進行方式,將步驟轉換成程式碼,因此沒有特別的參考資料。 \n \n> 如何取log https://blog.csdn.net/robertsong2004/article/details/46651503 \n> 如何取整數 http://kuanghy.github.io/2016/09/07/python-trunc\n"
},
{
"alpha_fraction": 0.5016307830810547,
"alphanum_fraction": 0.5120678544044495,
"avg_line_length": 41.58333206176758,
"blob_id": "b2aaf2aa249e75e0e306f3e43d072a781205cf52",
"content_id": "561902fe03bbed45bb07418efd0e236bbf4e4912",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1947,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 36,
"path": "/HW2/heap_sort_06170101.py",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "class Solution(object):\n def checkdad(self,arr,me):#接下來檢查爸爸\n if me !=0:#當我還不是第一個老大的時候我都還要繼續望上檢查爸爸\n dad=(me-1)//2\n if arr[me]>=arr[dad]:\n arr[me],arr[dad]=arr[dad],arr[me]\n me=dad#當我交換之後我就要換成爸爸繼續和我的爸爸檢查\n self.checkdad(arr,me)\n def checkson(self,arr,me):#那就來做一個工具 只和小孩比\n if me <=(len(arr))//2-1:\n left_son=2*me+1\n right_son=2*me+2\n if right_son<=len(arr)-1:#狀況又分成右小孩存在與右小孩不存在\n if arr[me]<=arr[left_son]:\n arr[me],arr[left_son]=arr[left_son],arr[me]\n if arr[me]<=arr[right_son]:\n arr[me],arr[right_son]=arr[right_son],arr[me]\n self.checkdad(arr,me)#當我出現交換位置 我就要回去和爸爸作比較 這是我等一下要寫的另一個工具\n elif arr[me]<=arr[right_son]:\n arr[me],arr[right_son]=arr[right_son],arr[me]\n self.checkdad(arr,me)\n else:#這邊就是右小孩不存在的狀況\n if arr[me]<=arr[left_son]:\n arr[me],arr[left_son]=arr[left_son],arr[me]\n self.checkdad(arr,me)\n def maxheap(self,arr):#現在就是把一個一個要檢查的人拿來檢查\n for i in range(len(arr)):\n self.checkson(arr,i)\n def heap_sort(self,arr):#最後一步\n final=[]#先設定我最後的解答\n while len(arr)!=0:#只要我的陣列還有東西就要繼續跑\n self.maxheap(arr)\n final.append(arr[0])#把最大的數加進去\n arr[0],arr[-1]=arr[-1],arr[0]#交換位置到最後一個\n arr.pop()#再拿掉最後一個數\n return final#就成功啦 萬歲\n"
},
{
"alpha_fraction": 0.4310953915119171,
"alphanum_fraction": 0.4478798508644104,
"avg_line_length": 32.32352828979492,
"blob_id": "7a4a9513e421125c0079574e33e211a4d592c449",
"content_id": "a816dac6360b4413fb64e9c25bcfa13b794ada3b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1316,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 34,
"path": "/HW2/merge_sort_06170101.py",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "import math\nclass Solution(object):\n def comparetwolist(self,a,b):#先製作一個比較兩數列的工具\n c=[]\n while len(a)*len(b)!=0:#當兩數列長度相乘不為零,代表裡面都還有元素存在\n if a[0]>=b[0]:#如果a的第一個數比較大,就把他加到新的c\n c.append(a[0])\n a.pop(0)#再把它拿掉\n else:\n c.append(b[0])#這個是如果b第一個數比較大的情況\n b.pop(0)\n if len(a)==0:#當a長度為零,直接把剩下的b接進c\n c=c+b\n else:\n c=c+a\n return c#最後回傳c\n def merge_sort(self,arr):\n for k in range(1,math.ceil(math.log(len(arr),2))+1):\n\n j=2**k\n final=[]\n n=len(arr)\n if len(arr)%j==0:\n for i in range(n//j): \n final=final+self.comparetwolist(arr[j*i:j*i+j//2],arr[j*i+j//2:j*i+j])\n arr=final\n else:\n\n for i in range(n//j):\n final=final+self.comparetwolist(arr[j*i:j*i+j//2],arr[j*i+j//2:j*i+j])\n lost=(n//j)*j\n final=final+self.comparetwolist(arr[(n//j)*j:(n//j)*j+j//2],arr[(n//j)*j+j//2:])\n arr=final\n return arr"
},
{
"alpha_fraction": 0.3329848051071167,
"alphanum_fraction": 0.3375156819820404,
"avg_line_length": 34.90726852416992,
"blob_id": "bb783ceb8ebe2c0d6b6a0dfd27c63e6acbe168ca",
"content_id": "696bb110e7a408b0eea40a95dce4ee377422a05f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14568,
"license_type": "no_license",
"max_line_length": 179,
"num_lines": 399,
"path": "/HW3/binary_search_tree_06170101.py",
"repo_name": "tzuchyi/class_exercise",
"src_encoding": "UTF-8",
"text": "class TreeNode(object):\n def __init__(self,x):\n self.val = x\n self.left = None\n self.right = None\n \n\nclass Solution(object):\n\n def inorder(self, root):\n if root is None: \n return \n else:\n self.inorder(root.left) \n print(root.val)\n self.inorder(root.right)\n# def insert_dad(self, root, val):\n# \n# temp=root\n# while temp!=None:\n# \n# if temp.val>= val:\n# temp_dad=temp\n# temp=temp.left\n# else :\n# temp_dad=temp\n# temp=temp.right\n#\n# temp=TreeNode(val)\n# if temp_dad.val>=val:\n# temp_dad.left=temp\n# else:\n# temp_dad.right=temp\n# return temp_dad\n\n def insert(self, root, val):\n \n temp=root\n while temp!=None:\n \n if temp.val>= val:\n temp_dad=temp\n temp=temp.left\n else :\n temp_dad=temp\n temp=temp.right\n \n temp=TreeNode(val)\n if temp_dad.val>=val:\n temp_dad.left=temp\n else:\n temp_dad.right=temp\n return temp\n \n \n def delete(self, root, target):\n \n temp=root\n temp_dad=None\n \n \n while temp!=None:\n \n if temp.val>target:\n temp_dad=temp\n temp=temp.left\n\n elif temp.val<target:\n temp_dad=temp\n temp=temp.right \n \n elif temp.val==target:\n \n if temp_dad is None:\n if temp.left==None:\n root=temp.right\n temp=root\n elif temp.right==None:\n root=temp.left\n temp=root\n else:\n # elif temp.right!=None and temp.left!=None :\n if temp.left.right==None: \n \n temp.left.right=temp.right\n root=temp.left\n temp=root\n \n else:\n \n left_max=temp.left\n left_max_dad=temp\n while left_max.right!=None:\n left_max_dad=left_max\n left_max=left_max.right\n if left_max.left==None:\n left_max_dad.right=None\n else:\n left_max_dad.right=left_max.left\n\n # left_max_dad.right=None\n left_max.left=temp.left\n left_max.right=temp.right\n root=left_max\n temp=root\n\n # root=temp.left\n \n # dad=self.insert_dad(root, temp.right.val)\n # dad.left=temp.left\n # dad.right=temp.right\n # temp=root\n \n \n else:\n if temp_dad.val>target:\n \n if temp.left==None and temp.right==None:\n temp_dad.left=None\n temp=None\n elif 
temp.left==None:\n temp_dad.left=temp.right\n temp=temp.right\n elif temp.right==None:\n temp_dad.left=temp.left\n temp=temp.left\n else:\n if temp.left.right==None:\n temp_dad.left=temp.left\n temp_dad.left.right=temp.right\n temp=root\n else:\n\n left_max=temp.left\n left_max_dad=temp\n while left_max.right!=None:\n left_max_dad=left_max\n left_max=left_max.right\n if left_max.left==None:\n left_max_dad.right=None\n else:\n left_max_dad.right=left_max.left\n\n #left_max_dad.right=None\n left_max.left=temp.left\n left_max.right=temp.right\n\n if temp_dad.val>target:\n temp_dad.left=left_max\n else:\n temp_dad.right=left_max\n temp=root\n else:\n if temp.left==None and temp.right==None:\n temp_dad.right=None\n temp=None\n elif temp.left==None:\n temp_dad.right=temp.right\n temp=temp.right\n elif temp.right==None:\n temp_dad.right=temp.left\n temp=temp.left\n else:\n if temp.left.right==None:\n temp_dad.right=temp.left\n temp_dad.right.right=temp.right\n temp=root\n else:\n\n left_max=temp.left\n left_max_dad=temp\n while left_max.right!=None:\n left_max_dad=left_max\n left_max=left_max.right\n if left_max.left==None:\n left_max_dad.right=None\n else:\n left_max_dad.right=left_max.left\n\n #left_max_dad.right=None\n left_max.left=temp.left\n left_max.right=temp.right\n\n if temp_dad.val>target:\n temp_dad.left=left_max\n else:\n temp_dad.right=left_max\n temp=root\n \n \n \n \n \n \n \n return root\n \n # temp=root.left\n \n \n # while temp!=None:\n # temp_dad=root\n # if temp.val==target:\n # if temp.left==None & temp.right==None:\n # temp_dad.left=None\n # elif temp.left==None:\n # temp_dad.left=temp.right\n # elif temp.right==None:\n # temp_dad.left=temp.left\n # else:\n # temp_dad.left=temp.left\n # temp_dad.left.right=temp.right\n # temp=temp.left\n \n \n \n \n \n def search(self, root, target):\n \n temp=root\n temp_dad=None\n \n \n while temp!=None:\n \n if temp.val>target:\n temp_dad=temp\n temp=temp.left\n\n elif temp.val<target:\n temp_dad=temp\n 
temp=temp.right \n \n else:\n return temp\n return None\n\n \n \n def modify(self, root, target, new_val):\n k=0\n temp=root\n temp_dad=None\n \n \n while temp!=None:\n \n if temp.val>target:\n temp_dad=temp\n temp=temp.left\n\n elif temp.val<target:\n temp_dad=temp\n temp=temp.right \n \n elif temp.val==target:\n k=k+1\n \n if temp_dad is None:\n if temp.left==None:\n root=temp.right\n temp=root\n elif temp.right==None:\n root=temp.left\n temp=root\n else:\n # elif temp.right!=None and temp.left!=None :\n if temp.left.right==None: \n \n temp.left.right=temp.right\n root=temp.left\n temp=root\n \n else:\n \n left_max=temp.left\n left_max_dad=temp\n while left_max.right!=None:\n left_max_dad=left_max\n left_max=left_max.right\n if left_max.left==None:\n left_max_dad.right=None\n else:\n left_max_dad.right=left_max.left\n\n # left_max_dad.right=None\n left_max.left=temp.left\n left_max.right=temp.right\n root=left_max\n temp=root\n\n # root=temp.left\n \n # dad=self.insert_dad(root, temp.right.val)\n # dad.left=temp.left\n # dad.right=temp.right\n # temp=root\n \n \n else:\n if temp_dad.val>target:\n \n if temp.left==None and temp.right==None:\n temp_dad.left=None\n temp=None\n elif temp.left==None:\n temp_dad.left=temp.right\n temp=temp.right\n elif temp.right==None:\n temp_dad.left=temp.left\n temp=temp.left\n else:\n if temp.left.right==None:\n temp_dad.left=temp.left\n temp_dad.left.right=temp.right\n temp=root\n else:\n\n left_max=temp.left\n left_max_dad=temp\n while left_max.right!=None:\n left_max_dad=left_max\n left_max=left_max.right\n if left_max.left==None:\n left_max_dad.right=None\n else:\n left_max_dad.right=left_max.left\n\n #left_max_dad.right=None\n left_max.left=temp.left\n left_max.right=temp.right\n\n if temp_dad.val>target:\n temp_dad.left=left_max\n else:\n temp_dad.right=left_max\n temp=root\n else:\n if temp.left==None and temp.right==None:\n temp_dad.right=None\n temp=None\n elif temp.left==None:\n temp_dad.right=temp.right\n 
temp=temp.right\n elif temp.right==None:\n temp_dad.right=temp.left\n temp=temp.left\n else:\n if temp.left.right==None:\n temp_dad.right=temp.left\n temp_dad.right.right=temp.right\n temp=root\n else:\n\n left_max=temp.left\n left_max_dad=temp\n while left_max.right!=None:\n left_max_dad=left_max\n left_max=left_max.right\n if left_max.left==None:\n left_max_dad.right=None\n else:\n left_max_dad.right=left_max.left\n\n #left_max_dad.right=None\n left_max.left=temp.left\n left_max.right=temp.right\n\n if temp_dad.val>target:\n temp_dad.left=left_max\n else:\n temp_dad.right=left_max\n temp=root\n \n \n while k>0:\n self.insert(root,new_val)\n k=k-1\n return root\n \n \n\n\n\n \n \n#參考資料:\n#這邊是我對於BST認識的資料來源\n\n#10/28-11/3 Binary Tree上課講義:\n#https://docs.google.com/presentation/d/e/2PACX-1vQgUh73yvSdxAvMH50DHWJ5lsCX8-daMxtoltU9rYW7xCmqYz2A1wOv0Vcx_F9KO5ZUvZBv3IF1TjGi/pub?start=false&loop=false&delayms=3000&slide=id.p\n#11/11-11/17 Binary Search Tree上課講義:\n#https://docs.google.com/presentation/d/e/2PACX-1vSC3P8sGElP48mJTjqT309470SmTFBwJXWsU9hTX2hg5tVpiG4yC703qA7ibPep-Qakmm2Mw_F-ScZh/pub?start=false&loop=false&delayms=3000&slide=id.p\n\n#程式碼的作法則是依照之前linked list的邏輯做出,\n#沒有參考其他網路上現成的程式碼。\n#針對刪除功能的邏輯有和簡大為做討論,\n#故若刪除功能程式碼與簡大為相似,只因邏輯相同,絕無抄襲。\n \n \n\n"
}
] | 10 |
avi09/AdWords_Placement_On_Customer_Bids | https://github.com/avi09/AdWords_Placement_On_Customer_Bids | 693f96fd91fbd5f3216ded5daa12b2d5f45d3c62 | c11ccebe71916fb9a71b9d02972a6c4db18b98e1 | c82bb01e1dec1599b77b1c24be57a345a8826853 | refs/heads/master | 2022-12-18T12:27:20.576110 | 2020-09-26T07:55:46 | 2020-09-26T07:55:46 | 285,454,760 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.779026210308075,
"alphanum_fraction": 0.8089887499809265,
"avg_line_length": 65.75,
"blob_id": "93239edadd2f25690fa1174c386163f63550ba5c",
"content_id": "b9cbe6f68f644e93b73ba331712270673cddf203",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 267,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 4,
"path": "/README.md",
"repo_name": "avi09/AdWords_Placement_On_Customer_Bids",
"src_encoding": "UTF-8",
"text": "# AdWords_Placement_On_Customer_Bids\nThis is a project about mapping the adiverisement bids to user search queries to calculate the maximum revenue. \n\nGreedy, MSVV and Balanced algorithms are used. The maximum revenue found was 16721 and competitive ratio was 0.99.\n"
},
{
"alpha_fraction": 0.4337975084781647,
"alphanum_fraction": 0.4725041389465332,
"avg_line_length": 25.627450942993164,
"blob_id": "131bf20554b46714758cf24e0c42bef0c2bf8c57",
"content_id": "a791bf65a96ca6068a1f5cb3927ef3f1a938a83d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4237,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 153,
"path": "/adwords.py",
"repo_name": "avi09/AdWords_Placement_On_Customer_Bids",
"src_encoding": "UTF-8",
"text": "import pandas as dd\r\nimport random\r\n\r\nfrom math import e\r\nimport sys\r\n\r\nif len(sys.argv)==1:\r\n print('No arguments')\r\nelif sys.argv[1]=='greedy' or sys.argv[1]=='mssv' or sys.argv[1]=='balanced':\r\n print('Running algorithm for '+sys.argv[1]+\" now. This may take some time\")\r\n\r\ndata=dd.read_csv('./bidder_dataset.csv')\r\nq=dd.read_csv('queries.txt',header=None)\r\n\r\ndef search(q,data):\r\n an=[]\r\n for i in range(len(data)):\r\n if q==data[i][1]:\r\n an.append([data[i][0],data[i][2]])\r\n return an\r\n\r\ndata1=[]\r\nfor i in range(len(data.iloc[:,1])):\r\n data1.append([data.loc[i,'Advertiser'],data.iloc[i,1],data.iloc[i,2]])\r\n\r\nbudget={}\r\nfor i in range(len(data1)):\r\n if str(data.loc[i,'Budget'])!='nan':\r\n budget[data1[i][0]]=data.loc[i,'Budget']\r\nbudget1=dict(zip(budget.keys(),budget.values()))\r\nrv=0\r\nrandom.seed(0)\r\nq1=[]\r\nfor i in range(len(q)):\r\n q1.append(q.iloc[i,0])\r\n\r\n\r\n\r\n\r\n\r\ndef greedy(budget,budget1,q1,data1):\r\n budget1=dict(zip(budget.keys(),budget.values()))\r\n rv=0\r\n for i in range(len(q1)):\r\n x=str(q1[i])\r\n x=search(x,data1)\r\n x=sorted(x,key=lambda x:x[1])\r\n mx=x[-1][1]\r\n y=[]\r\n cn=-1\r\n for k in range(len(x)):\r\n if x[k][1]==mx:\r\n cn+=1\r\n cn1=x[(len(x)-cn-1):]\r\n cn1=sorted(cn1,key=lambda x:x[0],reverse=True)\r\n x=x[:len(x)-cn-1]+cn1\r\n for j in reversed(x):\r\n if budget1[j[0]]-j[1]>0:\r\n budget1[j[0]]=budget1[j[0]]-j[1]\r\n rv+=j[1]\r\n break\r\n return rv\r\n\r\ndef msvv(budget,budget1,q1,data1):\r\n budget1=dict(zip(budget.keys(),budget.values()))\r\n rv=0\r\n for i in range(len(q1)):\r\n x=str(q1[i])\r\n x=search(x,data1)\r\n for j in x:\r\n j.append(j[1]*(1-(e**(((budget[j[0]]-budget1[j[0]])/budget[j[0]])-1))))\r\n x=sorted(x,key=lambda x:x[2])\r\n mx=x[-1][2]\r\n xy=-1\r\n for k in range(len(x)):\r\n if x[k][2]==mx:\r\n xy+=1\r\n xx=x[(len(x)-xy-1):]\r\n xx=sorted(xx,key=lambda x:x[0],reverse=True)\r\n x=x[:len(x)-xy-1]+xx\r\n for j in 
reversed(x):\r\n if budget1[j[0]]-j[1]>0:\r\n budget1[j[0]]=budget1[j[0]]-j[1]\r\n rv+=j[1]\r\n break\r\n return rv\r\n\r\ndef balanced(budget,budget1,q1,data1):\r\n budget1=dict(zip(budget.keys(),budget.values()))\r\n rv=0\r\n for i in range(len(q1)):\r\n x=str(q1[i])\r\n x=search(x,data1)\r\n for j in x:\r\n j.append(budget1[j[0]])\r\n x=sorted(x,key=lambda x:x[2])\r\n mx=x[-1][2]\r\n xy=-1\r\n for k in range(len(x)):\r\n if x[k][2]==mx:\r\n xy+=1\r\n xx=x[(len(x)-xy-1):]\r\n xx=sorted(xx,key=lambda x:x[0],reverse=True)\r\n x=x[:len(x)-xy-1]+xx\r\n for j in reversed(x):\r\n if budget1[j[0]]-j[1]>0:\r\n budget1[j[0]]=budget1[j[0]]-j[1]\r\n rv+=j[1]\r\n break\r\n return rv\r\n\r\nx=0\r\nif sys.argv[1]=='greedy':\r\n for i in range(-1,100):\r\n if i==-1:\r\n print(\"Original query\")\r\n else:\r\n random.seed(i)\r\n random.shuffle(q1)\r\n y=greedy(budget,budget1,q1,data1)\r\n if i==-1:\r\n print(\"For greedy, iteration 1 - \"+str(y))\r\n else:\r\n x+=y\r\n print(\"The ratio - \"+ str(((x/sum(budget.values()))/100)))\r\nx=0\r\nif sys.argv[1]=='mssv':\r\n for i in range(-1,100):\r\n if i==-1:\r\n print(\"Original query\")\r\n random.seed(i)\r\n random.shuffle(q1)\r\n y=msvv(budget,budget1,q1,data1)\r\n if i==-1:\r\n print(\"For mssv, iteration 1 - \"+str(y))\r\n else:\r\n x+=y\r\n print(\"The ratio \"+str((x/sum(budget.values()))/100))\r\n\r\nx=0\r\nif sys.argv[1]=='balanced':\r\n for i in range(-1,100):\r\n if i==-1:\r\n print(\"Original query\")\r\n else:\r\n random.seed(i)\r\n random.shuffle(q1)\r\n y=balanced(budget,budget1,q1,data1)\r\n if i==-1:\r\n print(\"For balanced, iteration 1 - \"+str(y))\r\n else:\r\n x+=y\r\n print(\"The ratio \"+str(((x/sum(budget.values()))/100)))\r\n\r\n\r\n\r\n\r\n\r\n"
}
] | 2 |
Arsha-Meenu/2021_Project_django_river_fakejira | https://github.com/Arsha-Meenu/2021_Project_django_river_fakejira | 33329753221d02b839d8c88338bc7cbea69f36df | 9414ac7609e938dedec27dbd586160a411bf01b2 | a18059f1ea225c5838ea99417ee7ce2c1183a319 | refs/heads/master | 2023-03-16T03:00:04.088076 | 2021-03-11T13:42:32 | 2021-03-11T13:42:32 | 346,710,195 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7156862616539001,
"alphanum_fraction": 0.7156862616539001,
"avg_line_length": 18.399999618530273,
"blob_id": "79220f7c90dfe21ae99850d16cc271fafb068c16",
"content_id": "f55aece439a3620448ccaa681c44c9ab47f38c4e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 102,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 5,
"path": "/dj_river_app/apps.py",
"repo_name": "Arsha-Meenu/2021_Project_django_river_fakejira",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\r\n\r\n\r\nclass DjRiverAppConfig(AppConfig):\r\n name = 'dj_river_app'\r\n"
},
{
"alpha_fraction": 0.5804701447486877,
"alphanum_fraction": 0.6175406575202942,
"avg_line_length": 37.5,
"blob_id": "9105257e7a69c1251d1397721dbef2b5a5a87ed5",
"content_id": "2688d9f3b880bd82725fd8a0ae6e3d733e77aa90",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1106,
"license_type": "no_license",
"max_line_length": 185,
"num_lines": 28,
"path": "/dj_river_app/migrations/0001_initial.py",
"repo_name": "Arsha-Meenu/2021_Project_django_river_fakejira",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.13 on 2021-02-23 03:24\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport river.models.fields.state\r\nimport uuid\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n initial = True\r\n\r\n dependencies = [\r\n ('river', '0002_auto_20210222_1224'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='Ticket',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('no', models.CharField(default=uuid.uuid4, editable=False, max_length=50, unique=True, verbose_name='Ticket Number')),\r\n ('subject', models.CharField(max_length=100, verbose_name='Subject')),\r\n ('description', models.TextField(blank=True, max_length=500, null=True, verbose_name='Description')),\r\n ('status', river.models.fields.state.StateField(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='river.State')),\r\n ],\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.5413436889648438,
"alphanum_fraction": 0.6033591628074646,
"avg_line_length": 31.65217399597168,
"blob_id": "649719453a168d9725eab79d11a3875cd5fdbb43",
"content_id": "be6b771bf1cfeb562c34bd9a082c775a9eb9574e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 774,
"license_type": "no_license",
"max_line_length": 177,
"num_lines": 23,
"path": "/dj_river_app/migrations/0004_mymodel.py",
"repo_name": "Arsha-Meenu/2021_Project_django_river_fakejira",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.13 on 2021-03-01 04:51\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport river.models.fields.state\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('river', '0002_auto_20210222_1224'),\r\n ('dj_river_app', '0003_auto_20210223_1217'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='MyModel',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('my_state_field', river.models.fields.state.StateField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='river.State')),\r\n ],\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.6639741659164429,
"alphanum_fraction": 0.6785137057304382,
"avg_line_length": 30.473684310913086,
"blob_id": "5fa840e29f54ad92f90f92226dd6168cc3bf904a",
"content_id": "0442532c9cee62c7ef5d8123d57cb19be34872e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 619,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 19,
"path": "/dj_river_app/models.py",
"repo_name": "Arsha-Meenu/2021_Project_django_river_fakejira",
"src_encoding": "UTF-8",
"text": "from django.db import models\r\n\r\n# Create your models here.\r\nimport uuid\r\n\r\n# Create your models here.\r\nfrom river.models.fields.state import StateField\r\n\r\n\r\nclass Ticket(models.Model):\r\n no = models.CharField(\"Ticket Number\", max_length=50, default=uuid.uuid4, null=False, blank=False, editable=False,\r\n unique=True)\r\n subject = models.CharField(\"Subject\", max_length=100, null=False, blank=False)\r\n description = models.TextField(\"Description\", max_length=500, null=True, blank=True)\r\n\r\n status = StateField(editable=False)\r\n\r\n def natural_key(self):\r\n return self.no\r\n\r\n"
},
{
"alpha_fraction": 0.7029449343681335,
"alphanum_fraction": 0.7144686579704285,
"avg_line_length": 26.925926208496094,
"blob_id": "7616db8092cffd7cf115bac30b44c5f9d401432b",
"content_id": "5e9bcdc939e0acffa14acd7ec7090bac038cf8dc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 781,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 27,
"path": "/dj_river_app/views.py",
"repo_name": "Arsha-Meenu/2021_Project_django_river_fakejira",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\r\nfrom django.http import HttpResponse\r\n\r\n# Create your views here.\r\ndef Sample(request):\r\n return HttpResponse(' Django River Example')\r\n\r\n\r\n\r\n# django river\r\nfrom django.urls import reverse\r\nfrom django.shortcuts import get_object_or_404, redirect\r\n\r\nfrom river.models import State\r\n\r\nfrom dj_river_app.models import Ticket\r\n\r\n\r\ndef approve_ticket(request, ticket_id, next_state_id=None):\r\n ticket = get_object_or_404(Ticket, pk=ticket_id)\r\n next_state = get_object_or_404(State, pk=next_state_id)\r\n\r\n try:\r\n ticket.river.status.approve(as_user=request.user, next_state=next_state)\r\n return redirect(reverse('admin:dj_river_app_ticket_changelist'))\r\n except Exception as e:\r\n return HttpResponse(e.message)\r\n"
},
{
"alpha_fraction": 0.6847184896469116,
"alphanum_fraction": 0.6868632435798645,
"avg_line_length": 39.488887786865234,
"blob_id": "4c8b211a6cc95123cd08e617da55b73fa7ca2458",
"content_id": "cf85b78121aecea8351d9645eab1b5ba3828fd69",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1865,
"license_type": "no_license",
"max_line_length": 233,
"num_lines": 45,
"path": "/dj_river_app/admin.py",
"repo_name": "Arsha-Meenu/2021_Project_django_river_fakejira",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\r\nimport river_admin\r\n\r\n\r\n# Register your models here.\r\nfrom django.urls import reverse\r\nfrom django.utils.safestring import mark_safe\r\n\r\nfrom dj_river_app.models import Ticket\r\n\r\n# here shows the river action functionality ie, what will happen after click on each button in the given actions\r\ndef create_river_button(obj, transition_approval):\r\n approve_ticket_url = reverse('approve_ticket', kwargs={'ticket_id': obj.pk, 'next_state_id': transition_approval.transition.destination_state.pk})\r\n return f\"\"\"\r\n <input\r\n type=\"button\"\r\n style=\"margin:2px;2px;2px;2px;\"\r\n value=\"{transition_approval.transition.source_state} >> {transition_approval.transition.destination_state}\"\r\n onclick=\"location.href=\\'{approve_ticket_url}\\'\"\r\n />\r\n \"\"\"\r\nclass TicketAdmin(admin.ModelAdmin):\r\n list_display = ('no', 'subject', 'description', 'status', 'river_actions')\r\n\r\n def get_list_display(self, request):\r\n self.user = request.user\r\n return super(TicketAdmin, self).get_list_display(request)\r\n\r\n def river_actions(self, obj):\r\n content = \"\"\r\n for transition_approval in obj.river.status.get_available_approvals(as_user=self.user):# get_available_approvals :to fetch all available approvals waitiong for a specific user according to given source and destination states.\r\n content += create_river_button(obj, transition_approval)\r\n\r\n return mark_safe(content) #marksafe: mark a string as safe for output purpose.\r\n\r\n\r\nadmin.site.register(Ticket, TicketAdmin)\r\n\r\nclass TicketRiverAdmin(river_admin.RiverAdmin):\r\n name = \"Django River Fakejira\"\r\n # icon = \"mdi-ticket-account\"\r\n list_displays = ['pk', 'no', 'subject', 'description', 'status']\r\n \r\n\r\nriver_admin.site.register(Ticket, \"status\", TicketRiverAdmin)"
},
{
"alpha_fraction": 0.4658227860927582,
"alphanum_fraction": 0.5468354225158691,
"avg_line_length": 19.94444465637207,
"blob_id": "ee6e66dc3e5b8c76b48dba61cb0cb90f26cdae13",
"content_id": "8260e453e72fcf29cc8bbdc09b7abff5df422e2e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 395,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 18,
"path": "/dj_river_app/migrations/0003_auto_20210223_1217.py",
"repo_name": "Arsha-Meenu/2021_Project_django_river_fakejira",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.13 on 2021-02-23 06:47\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('dj_river_app', '0002_auto_20210223_1157'),\r\n ]\r\n\r\n operations = [\r\n migrations.RenameField(\r\n model_name='ticket',\r\n old_name='my_state_field',\r\n new_name='status',\r\n ),\r\n ]\r\n"
},
{
"alpha_fraction": 0.4918566644191742,
"alphanum_fraction": 0.5570032596588135,
"avg_line_length": 17.1875,
"blob_id": "f7e80f3321d67f27c175807e48be7cb51ad113c3",
"content_id": "384cb80e61b4b44a0c4de4d3b6c01080c29cf3e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 307,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 16,
"path": "/dj_river_app/migrations/0005_delete_mymodel.py",
"repo_name": "Arsha-Meenu/2021_Project_django_river_fakejira",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.13 on 2021-03-01 08:21\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('dj_river_app', '0004_mymodel'),\r\n ]\r\n\r\n operations = [\r\n migrations.DeleteModel(\r\n name='MyModel',\r\n ),\r\n ]\r\n"
}
] | 8 |
Patrickskiba/AmazonCategoryTopSellers | https://github.com/Patrickskiba/AmazonCategoryTopSellers | ed4c76ea263f5e68755d55407b158ce062d03ab5 | d06a0a8d2774c55480683ecc105949003c934b5a | 01a1a9b9d7cf6f92be553ac0ab4dedcce4cabf7e | refs/heads/master | 2021-01-17T10:13:36.646997 | 2017-03-05T23:05:49 | 2017-03-05T23:05:49 | 84,009,070 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5941498875617981,
"alphanum_fraction": 0.6007769703865051,
"avg_line_length": 37.72566223144531,
"blob_id": "5dc0282c4881c4bd63af2154ee1a62ea71f8c3b7",
"content_id": "48d3d8500ca805d2b35520b4df09db278347e1f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4376,
"license_type": "no_license",
"max_line_length": 201,
"num_lines": 113,
"path": "/amazonsearchAPI.py",
"repo_name": "Patrickskiba/AmazonCategoryTopSellers",
"src_encoding": "UTF-8",
"text": "import time\nimport amazonproduct\nimport bleach\nimport datetime\nfrom goodreads import client\nfrom lxml import etree\n\n\nclass TopTenSellerASINs(object):\n\n def __init__(self, node):\n self._node = node\n\n def __request_top_ten_sellers_by_node(self, nodeid):\n api = amazonproduct.API(locale='us')\n return api.call(\n Operation='BrowseNodeLookup',\n BrowseNodeId=nodeid,\n ResponseGroup='TopSellers',\n Sort='salesrank')\n\n def __extract_asin_from_response(self, AmazonResponse):\n listofitems = []\n for item in range(0, 10):\n listofitems.append(AmazonResponse['BrowseNodes']['BrowseNode'][\n 'TopSellers']['TopSeller'][item]['ASIN'])\n return listofitems\n\n def list_of_asin_numbers(self):\n return self.__extract_asin_from_response(\n self.__request_top_ten_sellers_by_node(self._node))\n\n\nclass ItemDetails(object):\n\n def __init__(self, asinList):\n self._asinList = asinList\n self.gc = client.GoodreadsClient(os.environ['GOOD_READS_KEY'], os.environ['GOOD_READS_SECRET'])\n\n def get_top_ten_item_details(self):\n itemdetails = []\n for asinVal in self._asinList:\n itemdetails.append(self.__request_item_detail(asinVal))\n return itemdetails\n\n def __request_item_detail(self, asin):\n api = amazonproduct.API(locale='us')\n item = api.item_lookup(ItemId = asin, ResponseGroup = \"Large\")\n time.sleep(2)\n return self.__extract_only_useful_info(item)\n\n def __extract_only_useful_info(self, itemDetails):\n itemProfile = {}\n itemProfile['ASIN'] = (itemDetails['Items']['Item']['ASIN'])\n itemProfile['DetailPageURL'] = (itemDetails['Items']['Item']['DetailPageURL'])\n itemProfile['ImgUrl'] = (itemDetails['Items']['Item']['LargeImage']['URL'])\n itemProfile['Title'] = str(itemDetails['Items']['Item']['ItemAttributes']['Title']).replace(': A novel', '').replace(': A Novel', '')\n itemProfile['Author'] = (itemDetails['Items']['Item']['ItemAttributes']['Author'])\n try:\n itemProfile['ISBN'] = (itemDetails['Items']['Item']['ItemAttributes']['ISBN'])\n 
except:\n itemProfile['ISBN'] = itemProfile['ASIN']\n try:\n itemProfile['ProductDescription'] = self.__clean_description(self.__good_reads_desc(itemProfile['ISBN'], itemProfile['Title']))\n except:\n browse(locals)\n itemProfile['ProductDescription'] = (itemDetails['Items']['Item']['DetailPageURL'])\n\n\n return itemProfile\n\n def __clean_description(self, desc):\n with_tags_description = str((desc).encode('ascii', 'ignore').decode('ascii'))\n description = bleach.clean(with_tags_description, tags=['br','p'], strip=True)\n return description\n\n def __good_reads_desc(self, isbn, title):\n try:\n time.sleep(1)\n book = self.gc.search_books(str(isbn))\n browse(book)\n except:\n time.sleep(1)\n book = self.gc.search_books(str(title))\n browse(book)\n return book[0].description\n\n\n\n\nclass PageGenerator(object):\n def __init__(self, item_details):\n self._item_details = item_details\n\n def write_page_text(self):\n for item in range(0, 10):\n text = \"--- \\nlayout: post \\n\"\n text += (\"title: \" + str(self._item_details[item]['Title']).replace(':', '-') + \"\\n\")\n text += (\"author: \" + self._item_details[item]['Author'] + \"\\n\")\n text += (\"img: \" + self._item_details[item]['ImgUrl'] + \"\\n--- \\n\")\n text += (self._item_details[item]['ProductDescription'] + \"\\n\")\n text += (\"<br/><br/> <a href=\\\"\" + self._item_details[item]['DetailPageURL'] + \"\\\"><img src=\\\"https://images-na.ssl-images-amazon.com/images/G/01/associates/remote-buy-box/buy1.gif\\\"></a>\")\n print(text)\n with open(\"/home/vagrant/bookshopguide.github.io/_posts/\" + datetime.datetime.now().strftime(\"%Y-%m-%d\") + \"-\" +str(self._item_details[item]['Title']) + \".md\", \"w\") as file_:\n file_.write(text)\n\n\n\nps4TopSellerAsins = TopTenSellerASINs(4919323011)\nps4ItemDetails = ItemDetails(ps4TopSellerAsins.list_of_asin_numbers())\n\nps4Items = PageGenerator(ps4ItemDetails.get_top_ten_item_details())\nps4Items.write_page_text()\n"
}
] | 1 |
nhlinh99/SSD | https://github.com/nhlinh99/SSD | 414f74e9f9bc154ca323f94ab2f8562e48e0d8ba | 223dc14b780748ef627201a52cceabfde65e34fc | 1218cf28d9eecc2e4a3554f197fd911565b4f033 | refs/heads/master | 2023-07-13T04:17:57.775414 | 2021-08-19T04:18:47 | 2021-08-19T04:18:47 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5745577216148376,
"alphanum_fraction": 0.6015164256095886,
"avg_line_length": 30.236841201782227,
"blob_id": "aa269d56b7034c335109f7b41cdf547e9471f713",
"content_id": "7e25a517becab6e2d2767bf0eff7e212213776b4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1187,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 38,
"path": "/ssd/utils/misc.py",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "import errno\nimport os\nfrom PIL import Image\n\n\ndef str2bool(s):\n return s.lower() in ('true', '1')\n\n\ndef mkdir(path):\n try:\n os.makedirs(path)\n except OSError as e:\n if e.errno != errno.EEXIST:\n raise\n\ndef reorient_image(im):\n try:\n image_exif = im._getexif()\n image_orientation = image_exif[274]\n if image_orientation in (2,'2'):\n return im.transpose(Image.FLIP_LEFT_RIGHT)\n elif image_orientation in (3,'3'):\n return im.transpose(Image.ROTATE_180)\n elif image_orientation in (4,'4'):\n return im.transpose(Image.FLIP_TOP_BOTTOM)\n elif image_orientation in (5,'5'):\n return im.transpose(Image.ROTATE_90).transpose(Image.FLIP_TOP_BOTTOM)\n elif image_orientation in (6,'6'):\n return im.transpose(Image.ROTATE_270)\n elif image_orientation in (7,'7'):\n return im.transpose(Image.ROTATE_270).transpose(Image.FLIP_TOP_BOTTOM)\n elif image_orientation in (8,'8'):\n return im.transpose(Image.ROTATE_90)\n else:\n return im\n except (KeyError, AttributeError, TypeError, IndexError):\n return im\n"
},
{
"alpha_fraction": 0.6129996180534363,
"alphanum_fraction": 0.6236459016799927,
"avg_line_length": 33.32692337036133,
"blob_id": "324968e5c26066522cb878c56fd1fe3d71083195",
"content_id": "8e21bcfa93982301f677fbac9a10de8531ac9307",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5354,
"license_type": "permissive",
"max_line_length": 191,
"num_lines": 156,
"path": "/convert_pascalvoc_dataset/data_preprocess.py",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "from argparse import ArgumentParser\nfrom tqdm import tqdm\nimport numpy as np\nimport random\nimport json\nimport math\nimport os\n\n\nALLOWED_EXTENSIONS = [\".jpg\", \".jpeg\", \".png\", \".gif\"]\n\n\ndef parse_inputs():\n \"\"\" Parser function to take care of the inputs \"\"\"\n parser = ArgumentParser(description='Argument: python data_preprocess.py <data_direction> <output_annotation_path> <test_ratio>')\n parser.add_argument('data_dir', type=str,\n help='Enter path to data direction.')\n parser.add_argument('output_annotation_path', type=str,\n help='Enter the path of the output of annotation files.')\n parser.add_argument('test_ratio', default=0.1, type=float,\n help='Test Ratio.')\n args = parser.parse_args()\n\n return (args.data_dir, args.output_annotation_path, args.test_ratio)\n\n\ndef distance_two_points(p1, p2):\n return math.sqrt(math.pow(p1[0] - p2[0], 2) + math.pow(p1[1] - p2[1], 2))\n\n\ndef get_center_point(points):\n center_point = [0, 0]\n for point in points:\n center_point[0] += point[0] / len(points)\n center_point[1] += point[1] / len(points)\n return np.array(center_point)\n\n\ndef shrinking_points(points, change_pixel):\n\n center_point = get_center_point(points)\n distance_from_corner_to_center = distance_two_points(points[0], center_point)\n\n increase_ratio = (change_pixel + distance_from_corner_to_center) / distance_from_corner_to_center\n new_points = []\n for point in points:\n new_point = (np.array(point) - center_point) * increase_ratio + center_point\n new_points.append(new_point.tolist())\n\n return new_points\n\n\ndef train_test_split(image_dir, test_ratio, val_ratio = 0.2):\n\n types_id = [\"cccd\", \"cmnd\"]\n types_face = [\"back\", \"top\"]\n \n folder_dirs = []\n for id in types_id:\n for face in types_face:\n folder_dirs.append(os.path.join(image_dir + \"/\", id + \"/\", face))\n\n images = []\n for folder in folder_dirs:\n for tail_img in ALLOWED_EXTENSIONS:\n images.extend([os.path.join(folder + \"/\", f) for 
f in os.listdir(folder) if tail_img in f])\n\n random.seed(1234)\n random.shuffle(images)\n\n batch_size = 32\n \n train_num = int(len(images) * (1 - val_ratio - test_ratio) // batch_size * batch_size)\n val_num = int(len(images) * val_ratio)\n\n images_train = images[:train_num]\n images_val = images[train_num:train_num + val_num]\n images_test = images[train_num + val_num:]\n\n return images_train, images_val, images_test\n\n\ndef parse_annotation(data_dir, image_list, output_annotation):\n\n json_file = []\n for tail_img in ALLOWED_EXTENSIONS:\n json_file.extend([f.replace(tail_img, \".json\") for f in image_list if tail_img in f])\n \n result_str = []\n print(\"Getting Annotations {}...\".format(output_annotation))\n for f in tqdm(json_file):\n\n base_folder_path = os.path.dirname(f)\n \n fi = open(os.path.join(data_dir, f), \"r\", encoding = \"utf-8\")\n data = json.load(fi)\n \n str_data = []\n str_data.append(os.path.join(base_folder_path + \"/\", data[\"imagePath\"]))\n annotations = data[\"shapes\"]\n width = data[\"imageWidth\"]\n height = data[\"imageHeight\"]\n\n points = []\n for i in range(len(annotations)):\n points.append(annotations[i][\"points\"][0])\n\n center_point = get_center_point(points)\n\n thresh = distance_two_points(center_point, points[0]) / 8\n shrinking_thresh = thresh * 5 / 4\n\n points = shrinking_points(points, -shrinking_thresh)\n\n for i in range(len(annotations)):\n label = annotations[i][\"label\"]\n if (label not in [\"top_left\", \"top_right\", \"bottom_left\", \"bottom_right\"]):\n continue\n\n point = points[i]\n x1 = int(max(point[0] - thresh, 0))\n x2 = int(min(point[0] + thresh, width - 1))\n y1 = int(max(point[1] - thresh, 0))\n y2 = int(min(point[1] + thresh, height - 1))\n\n str_data.extend([label, str(x1), str(y1), str(x2), str(y2)])\n\n str_data = \",\".join(str_data)\n result_str.append(str_data)\n\n result_str = \"\\n\".join(result_str)\n \n fo = open(output_annotation, \"w\", encoding = \"utf-8\")\n 
fo.write(result_str)\n fo.close()\n\n\nif __name__ == \"__main__\":\n\n #!python convert_pascalvoc_dataset/data_preprocess.py \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/dataset\" \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/Annotations\" 0.1\n\n # OR test on local:\n # python convert_pascalvoc_dataset/data_preprocess.py \"../dataset\" \"../Annotations\" 0.1\n data_dir, output_annotation_path, test_ratio = parse_inputs()\n\n if (not os.path.isdir(output_annotation_path)):\n os.mkdir(output_annotation_path)\n\n train_annotaion_file = os.path.join(output_annotation_path, \"Train_annotation.txt\")\n val_annotation_file = os.path.join(output_annotation_path, \"Val_annotation.txt\")\n test_annotation_file = os.path.join(output_annotation_path, \"Test_annotation.txt\")\n\n train_img, val_img, test_img = train_test_split(data_dir, test_ratio)\n parse_annotation(data_dir, train_img, train_annotaion_file)\n parse_annotation(data_dir, val_img, val_annotation_file)\n parse_annotation(data_dir, test_img, test_annotation_file)"
},
{
"alpha_fraction": 0.5455386638641357,
"alphanum_fraction": 0.5605331063270569,
"avg_line_length": 39.01481628417969,
"blob_id": "1b0b9a68a693fb7deadd5583359f3ee212fadd9d",
"content_id": "efc0fa145cdde6af65cb8ee556d5c358e8cf769f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5402,
"license_type": "permissive",
"max_line_length": 106,
"num_lines": 135,
"path": "/convert_pascalvoc_dataset/pascal_voc/pascal_voc.py",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "#! -*- coding: utf-8 -*-\n\n\nimport os\nfrom PIL import Image\n\nfrom utils.file_utils import create_if_not_exists, copy_file\nfrom utils.xml_utils import create_xml_file\nfrom tqdm import tqdm\nimport json\n\ndef reorient_image(im):\n try:\n image_exif = im._getexif()\n image_orientation = image_exif[274]\n if image_orientation in (2,'2'):\n return im.transpose(Image.FLIP_LEFT_RIGHT)\n elif image_orientation in (3,'3'):\n return im.transpose(Image.ROTATE_180)\n elif image_orientation in (4,'4'):\n return im.transpose(Image.FLIP_TOP_BOTTOM)\n elif image_orientation in (5,'5'):\n return im.transpose(Image.ROTATE_90).transpose(Image.FLIP_TOP_BOTTOM)\n elif image_orientation in (6,'6'):\n return im.transpose(Image.ROTATE_270)\n elif image_orientation in (7,'7'):\n return im.transpose(Image.ROTATE_270).transpose(Image.FLIP_TOP_BOTTOM)\n elif image_orientation in (8,'8'):\n return im.transpose(Image.ROTATE_90)\n else:\n return im\n except (KeyError, AttributeError, TypeError, IndexError):\n return im\n\n\nclass PASCALVOC07(object):\n\n def __init__(self, trainval_anno, val_anno, test_anno, out_dir, attrs):\n self._trainval_anno = trainval_anno\n self._val_anno = val_anno\n self._test_anno = test_anno\n self._out_dir = out_dir\n self._attrs = attrs\n\n self._jpegimages_dir = None\n self._imagesets_dir = None\n self._annotations_dir = None\n self._img_idx = 0\n\n def _build_voc_dir(self):\n self._out_dir = self._out_dir\n create_if_not_exists(os.path.join(self._out_dir, 'Annotations'))\n create_if_not_exists(os.path.join(self._out_dir, 'ImageSets'))\n create_if_not_exists(os.path.join(self._out_dir, 'ImageSets', 'Layout'))\n create_if_not_exists(os.path.join(self._out_dir, 'ImageSets', 'Main'))\n create_if_not_exists(os.path.join(self._out_dir, 'ImageSets', 'Segmentation'))\n create_if_not_exists(os.path.join(self._out_dir, 'JPEGImages'))\n create_if_not_exists(os.path.join(self._out_dir, 'SegmentationClass'))\n create_if_not_exists(os.path.join(self._out_dir, 
'SegmentationObject'))\n self._annotations_dir = os.path.join(self._out_dir, 'Annotations')\n self._jpegimages_dir = os.path.join(self._out_dir, 'JPEGImages')\n self._imagesets_dir = os.path.join(self._out_dir, 'ImageSets', 'Main')\n\n def _create_annotation(self, image_idx, boxes):\n anno_file = os.path.join(self._annotations_dir, \"{:06d}.xml\".format(image_idx))\n attrs = dict()\n attrs['image_name'] = \"{:06d}.jpg\".format(image_idx)\n attrs['boxes'] = boxes\n\n img = Image.open(os.path.join(self._jpegimages_dir, \"{:06d}.jpg\".format(image_idx)))\n img = reorient_image(img)\n\n width, height = img.size\n attrs['width'] = str(width)\n attrs['height'] = str(height)\n for k, v in self._attrs.items():\n attrs[k] = v\n create_xml_file(anno_file, attrs)\n\n def _build_subset(self, start_idx, phase, anno_file, verbose=True, delimiter=' '):\n\n fout = open(os.path.join(self._imagesets_dir, '{}.txt'.format(phase)), 'w')\n\n # dictionary_image_id = {}\n n = 0\n with open(anno_file, 'r', encoding = \"utf-8\") as anno_f:\n for line in tqdm(anno_f):\n line_split = line.strip().split(delimiter)\n\n # image saved path\n image_path = line_split[0]\n\n # a ground truth with bounding box\n boxes = []\n for i in range(int((len(line_split) - 1) / 5)):\n category = line_split[1 + i * 5 + 0]\n x1 = line_split[1 + i * 5 + 1]\n y1 = line_split[1 + i * 5 + 2]\n x2 = line_split[1 + i * 5 + 3]\n y2 = line_split[1 + i * 5 + 4]\n boxes.append((category, x1, y1, x2, y2))\n\n image_idx = start_idx + n\n n += 1\n # copy and rename image by index number\n copy_file(image_path, self._jpegimages_dir, '{:06}.jpg'.format(image_idx))\n # dictionary_image_id[image_path.split(\"/\")[-1]] = '{:06}.jpg'.format(image_idx)\n \n # write image idx to imagesets file\n fout.write('{:06}'.format(image_idx) + '\\n')\n\n # create annotation file\n self._create_annotation(image_idx, boxes)\n\n fout.close()\n\n if (phase == \"test\"):\n fout = open(os.path.join(self._imagesets_dir, 'test_full.txt'), 'w')\n for 
image_idx in range(1, n + 1):\n fout.write('{:06}'.format(image_idx) + '\\n')\n\n fout.close()\n\n # with open(\"../data/dictionary_image.json\", \"a\") as outfile: \n # json.dump(dictionary_image_id, outfile)\n # outfile.close()\n \n return n\n\n def build(self, start_idx=1, verbose=True):\n self._build_voc_dir()\n\n n_train = self._build_subset(start_idx, \"train\", self._trainval_anno, verbose, delimiter = \",\")\n n_val = self._build_subset(n_train + start_idx, \"val\", self._val_anno, verbose, delimiter = \",\")\n self._build_subset(n_train + n_val + start_idx, \"test\", self._test_anno, verbose, delimiter = \",\")\n"
},
{
"alpha_fraction": 0.535365879535675,
"alphanum_fraction": 0.556968629360199,
"avg_line_length": 29.375661849975586,
"blob_id": "2b484c35b55ce8ca5ac4d5837de802d4f39db9ec",
"content_id": "3c89feda9ae7710b1effe2a98e2fd18164ba3a1b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5740,
"license_type": "permissive",
"max_line_length": 134,
"num_lines": 189,
"path": "/convert_pascalvoc_dataset/transform_images.py",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "# import cv2\n# import os\n# from tqdm import tqdm\n# import json\n# import numpy as np\n# from argparse import ArgumentParser\n# import math\n# import random\n\n\n# ALLOWED_EXTENSIONS = [\".jpg\", \".jpeg\", \".png\", \".gif\"]\n# pixel_border = 40\n\n\n# def parse_inputs():\n# \"\"\" Parser function to take care of the inputs \"\"\"\n# parser = ArgumentParser(description='Argument: python transform_images.py <data_direction> <output_dir>')\n# parser.add_argument('data_dir', type=str, default=\"../cmnd_back\",\n# help='Enter path to data direction.')\n# parser.add_argument('output_dir', type=str, default=\"../cmnd_back_transform\",\n# help='Enter the path of the output of transformation.')\n# args = parser.parse_args()\n\n# return (args.data_dir, args.output_dir)\n\n\n# def distance_two_points(p1, p2):\n# return math.sqrt(math.pow(p1[0] - p2[0], 2) + math.pow(p1[1] - p2[1], 2))\n\n\n# def get_center_point(points):\n# center_point = [0, 0]\n# for point in points:\n# center_point[0] += point[0] / len(points)\n# center_point[1] += point[1] / len(points)\n# return np.array(center_point)\n\n\n# def adjust_gamma(image, gamma=1.0):\n \n# \tinvGamma = 1.0 / gamma\n# \ttable = np.array([((i / 255.0) ** invGamma) * 255\n# \t\tfor i in np.arange(0, 256)]).astype(\"uint8\")\n \n# \treturn cv2.LUT(image, table)\n\n\n# def preprocess_image(image):\n# dst = cv2.detailEnhance(image, sigma_s=10, sigma_r=0.15)\n# dst = cv2.copyMakeBorder(dst, pixel_border, pixel_border, pixel_border, pixel_border, cv2.BORDER_CONSTANT,value=(255,255,255))\n# return dst\n\n\n# def rotate_box_in_image(corners, angle, width, height, nW, nH):\n \n# center_image = [width//2, height//2]\n# dW = nW - width\n# dH = nH - height\n\n# rad = angle * math.pi / 180\n\n# result = []\n# for corner in corners:\n# x_new = center_image[0] + (corner[0] - center_image[0])*math.cos(rad) + (corner[1] - center_image[1])*math.sin(rad) + dW / 2\n# y_new = center_image[1] - (corner[0] - center_image[0])*math.sin(rad) 
+ (corner[1] - center_image[1])*math.cos(rad) + dH / 2\n# result.append([x_new, y_new])\n \n# return result\n\n\n# def rotate_image(image, angle, gamma):\n\n# height, width, _ = image.shape\n# image = adjust_gamma(image, gamma)\n\n# M = cv2.getRotationMatrix2D((width//2, height//2), angle, 1.0)\n\n# cos = np.abs(M[0, 0])\n# sin = np.abs(M[0, 1])\n \n# nW = int((height * sin) + (width * cos))\n# nH = int((height * cos) + (width * sin))\n \n \n# M[0, 2] += (nW / 2) - width//2\n# M[1, 2] += (nH / 2) - height//2\n# new_img = cv2.warpAffine(image, M, (nW, nH), borderValue=(255,255,255))\n\n# return new_img\n\n\n# def augmentation(image, points, labels):\n \n# height, width, _ = image.shape\n\n# angles = [random.uniform(-180, 180) for i in range(5)]\n# gammas = [random.uniform(0.5, 1.7) for i in range(5)]\n\n\n# res_images = [image]\n# res_points = [points]\n# res_labels = [labels]\n\n# for i in range(len(angles)):\n\n# new_image = rotate_image(image, angles[i], gammas[i])\n\n# nH, nW, _ = new_image.shape\n\n# new_points = rotate_box_in_image(points, angles[i], width, height, nW, nH)\n\n# res_images.append(new_image)\n# res_points.append(new_points)\n# res_labels.append(labels)\n\n\n# return res_images, res_points, res_labels\n\n\n\n# def transform_images(input_dir, output_dir, augmentation_check = True):\n\n# if (not os.path.isdir(output_dir)):\n# os.mkdir(output_dir)\n\n# img_list = []\n\n# for extension in ALLOWED_EXTENSIONS:\n# img_list.extend([f for f in os.listdir(input_dir) if extension in f])\n\n# for img_name in tqdm(img_list):\n\n# img = cv2.imdecode(np.fromfile(os.path.join(input_dir, img_name), dtype=np.uint8), cv2.IMREAD_COLOR)\n\n# dst = preprocess_image(img)\n\n# for extension in ALLOWED_EXTENSIONS:\n# if (extension in img_name):\n# fi = open(os.path.join(input_dir, img_name).replace(extension, \".json\"), \"r\", encoding = \"utf-8\")\n\n# data = json.load(fi)\n\n\n# annotations = data[\"shapes\"]\n# points = []\n# labels = []\n# for i in 
range(len(annotations)):\n# point = annotations[i][\"points\"][0]\n# point[0] += pixel_border\n# point[1] += pixel_border\n# points.append(point)\n# labels.append(annotations[i][\"label\"])\n\n\n# if (augmentation_check):\n# images, new_set_points, set_labels = augmentation(dst, points, labels)\n\n# for i in range(len(images)):\n\n# new_points = new_set_points[i]\n# new_labels = set_labels[i]\n# img = images[i]\n# nH, nW, _ = img.shape\n# new_img_name = \"{}_\".format(i + 1) + img_name\n \n# for k in range(len(data[\"shapes\"])):\n# label_name = data[\"shapes\"][k][\"label\"]\n# data[\"shapes\"][k][\"points\"][0] = new_points[new_labels.index(label_name)]\n\n# data[\"imageHeight\"] = nH\n# data[\"imageWidth\"] = nW\n# data[\"imagePath\"] = new_img_name\n\n# for extension in ALLOWED_EXTENSIONS:\n# if (extension in img_name):\n# fo = open(os.path.join(output_dir, new_img_name).replace(extension, \".json\"), \"w\", encoding = \"utf-8\")\n \n\n# json.dump(data, fo, indent = 4)\n# fo.close()\n \n# cv2.imwrite(os.path.join(output_dir, new_img_name), img)\n\n\n# if __name__ == \"__main__\":\n \n# input_dir, output_dir = parse_inputs()\n\n# transform_images(input_dir, output_dir, augmentation_check = True)"
},
{
"alpha_fraction": 0.7443287968635559,
"alphanum_fraction": 0.7563384175300598,
"avg_line_length": 38.452632904052734,
"blob_id": "1cd9f8fbfb79a2e442d7a557a6f724b93183877f",
"content_id": "b3cc71f7cb8f04816e77ba73c6fb35c2fec8817b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4083,
"license_type": "permissive",
"max_line_length": 345,
"num_lines": 95,
"path": "/README.md",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "# High quality, fast, modular reference implementation of SSD in PyTorch 1.0\n\n## Develop Guide\n\nIf you want to add your custom components, please see [DEVELOP_GUIDE.md](DEVELOP_GUIDE.md) for more details.\n\n1/ Chuyển đổi dữ liệu sang format PASCAL VOC:\n\n+ Clone về source trên Google Colab:\n\n```text\n!git clone https://github.com/ontheskyl/SSD.git\n%cd SSD\n```\n\n+ Dữ liệu đầu vào:\n\nKhởi tạo 1 folder, trong đó chứa 2 folder nhỏ lần lượt cho chứng minh thư và căn cước công dân (cmnd, cccd). Mỗi folder nhỏ này chứa 2 folder con lần lượt là mặt trước và mặt sau (back, top), mỗi folder nhỏ chứa đầy đủ các ảnh và file json cùng tên tương ứng\n\n+ Chạy file data_preprocess.py:\n\nFile này sẽ phân chia dữ liệu theo 3 tập (train, validation, test) với validation mặc định 0.2\n\npython convert_pascalvoc_dataset/data_preprocess.py <data_direction> <output_annotation_path> <test_ratio>\n\ndata_direction: đường dẫn dữ liệu ban đầu\n\noutput_annotation_path: đường dẫn kết quả trả về (chương trình sẽ trả về 3 file Train_annotation.txt, Val_annotation.txt và Test_annotation.txt\n\ntest_ratio: tỉ lệ dữ liệu testing\n\nVí dụ: \n```text\n!python convert_pascalvoc_dataset/data_preprocess.py \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/cmnd\" \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/Annotations\" 0.1\n```\n\n+ Chạy file build.py:\n\nFile này sẽ dựa vào 3 file Train_Annotations.txt, Val_Annotations.txt, Test_Annotations.txt để xây dựng format PASCAL VOC\n\npython convert_pascalvoc_dataset/data_preprocess.py <annotation_path> <output_direction>\n\noutput_direction: đường dẫn kết quả trả về (chương trình sẽ tạo dữ liệu theo chuẩn PASCAL VOC)\n\nannotation_path: đường dẫn đến 3 file Train_annotation.txt, Val_Annotation.txt và Test_annotation.txt đã tạo trước đó\n\nVí dụ:\n```text\n!python convert_pascalvoc_dataset/build.py \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/Annotations\" \"/content/drive/MyDrive/Colab Notebooks/Sunshine 
Tech/data\" \n```\n\n2/ Training\n\n+ Thay đổi đường dẫn dữ liệu đầu vào:\n\nTại file SSD/ssd/config/path_catalog.py:\n\nThay đổi my_root trong hàm get của \"my_custom\" thành đường dẫn gốc đến dữ liệu, thay đổi đường dẫn của DATASET\n\n+ Thay đổi file config tại SSD/config:\n\nNUM_CLASSES: số lượng object training (5 class bao gồm background)\n\nDATASETS: kiểm tra tại file path_catalog.py để ghi 2 file tương ứng\n\nOUTPUT_DIR: đường dẫn lưu model của chương trình (lưu ý, nếu đã train trước đó, thì cần phải thay đổi path trong file models/mobilenetv2_ssd…_my_dataset/last_checkpoint.txt với đường dẫn mới\n\n+ Training stage:\n```text\n!python train.py --config-file configs/my_custom_config_320.yaml\n```\n3/ Testing\n```text\n!python test.py --config-file configs/my_custom_config_320.yaml\n```\n4/ Demo\n\n!python --config-file <config file> --images_dir <image direction> --ckpt <model> --output_dir <output direction> --score_threshold <score>\n```text\n!python demo.py --config-file configs/my_custom_config_vgg_512.yaml --images_dir \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/cmnd_back\" --ckpt \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/models/vgg_ssd512_30k_4cls/model_final.pth\" --output_dir \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/demo\" --score_threshold 0.5\n```\n\n## Troubleshooting\nIf you have issues running or compiling this code, we have compiled a list of common issues in [TROUBLESHOOTING.md](TROUBLESHOOTING.md). If your issue is not present there, please feel free to open a new issue.\n\n## Citations\nIf you use this project in your research, please cite this project.\n```text\n@misc{lufficc2018ssd,\n author = {Congcong Li},\n title = {{High quality, fast, modular reference implementation of SSD in PyTorch}},\n year = {2018},\n howpublished = {\\url{https://github.com/lufficc/SSD}}\n}\n```"
},
{
"alpha_fraction": 0.5816537141799927,
"alphanum_fraction": 0.5998148322105408,
"avg_line_length": 38.5521125793457,
"blob_id": "e41f5dd8508ebdf2880f89c32e27564c4196222f",
"content_id": "b2ac13c54a1723a9569c408b9939672c77043a31",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14041,
"license_type": "permissive",
"max_line_length": 173,
"num_lines": 355,
"path": "/demo.py",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "from vizer.draw import draw_boxes\nfrom PIL import Image\nimport numpy as np\nimport collections\nimport argparse\nimport torch\nimport glob\nimport time\nimport cv2\nimport os\n\nfrom ssd.config import cfg\nfrom ssd.data.datasets import COCODataset, VOCDataset, MyDataset\nfrom ssd.data.transforms import build_transforms\nfrom ssd.modeling.detector import build_detection_model\nfrom ssd.utils import mkdir\nfrom ssd.utils.checkpoint import CheckPointer\n\n\ndef distance_two_points(point_1, point_2):\n return np.sqrt(np.power(point_1[0] - point_2[0], 2) + np.power(point_1[1] - point_2[1], 2))\n\n\ndef get_center_bbox(box):\n a = (box[0] + box[2]) / 2\n b = (box[1] + box[3]) / 2\n return np.array([a, b])\n\n\ndef check_point(point, image):\n w = image.shape[1]\n h = image.shape[0]\n\n if (point[0] < 0):\n point[0] = 0\n elif (point[0] > w):\n point[0] = w - 1\n \n if (point[1] < 0):\n point[1] = 0\n elif (point[1] > h):\n point[1] = h - 1\n\n return point\n\n\ndef perspective_transform(image, source_points):\n dest_points = np.float32([[0, 0], [500, 0], [500, 300], [0, 300]])\n M = cv2.getPerspectiveTransform(source_points, dest_points)\n dst = cv2.warpPerspective(image, M, (500, 300))\n cv2.cvtColor(dst, cv2.COLOR_BGR2RGB)\n return dst\n\n\ndef align_image(image, top_left, top_right, bottom_right, bottom_left, expand_alignment = False):\n top_left_point = get_center_bbox(top_left)\n top_right_point = get_center_bbox(top_right)\n bottom_right_point = get_center_bbox(bottom_right)\n bottom_left_point = get_center_bbox(bottom_left)\n\n if (expand_alignment):\n x_val = (top_left_point[0] + top_right_point[0] + bottom_right_point[0] + bottom_left_point[0]) / 4\n y_val = (top_left_point[1] + top_right_point[1] + bottom_right_point[1] + bottom_left_point[1]) / 4\n center_point = np.array([x_val, y_val])\n\n distance_from_corner_to_center = distance_two_points(top_left_point, center_point)\n increase_pixel = distance_from_corner_to_center / 4.5\n increase_ratio = 
(increase_pixel + distance_from_corner_to_center) / distance_from_corner_to_center\n\n top_left_point = (top_left_point - center_point) * increase_ratio + center_point\n top_right_point = (top_right_point - center_point) * increase_ratio + center_point\n bottom_right_point = (bottom_right_point - center_point) * increase_ratio + center_point\n bottom_left_point = (bottom_left_point - center_point) * increase_ratio + center_point\n\n top_left_point = check_point(top_left_point, image)\n top_right_point = check_point(top_right_point, image)\n bottom_right_point = check_point(bottom_right_point, image)\n bottom_left_point = check_point(bottom_left_point, image)\n\n source_points = np.float32(\n [top_left_point, top_right_point, bottom_right_point, bottom_left_point]\n )\n crop = perspective_transform(image, source_points)\n return crop\n\n\ndef image_processing(image):\n\n # # Detail enhance and create border\n # dst = cv2.detailEnhance(image, sigma_s=10, sigma_r=0.15)\n # dst= cv2.copyMakeBorder(dst, pixel_border, pixel_border, pixel_border, pixel_border, cv2.BORDER_CONSTANT,value=(255,255,255))\n\n dst = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n dst = Image.fromarray(dst)\n return np.asarray(dst)\n\n\ndef process_duplicate_labels(labels, scores, boxes, check_9_labels):\n\n # Delete duplicate 2 sides of id card\n if (check_9_labels):\n\n group_items = np.array([(i - 1)//4 for i in labels]) # There are 8 labels including (TL, TR, BR, BL) for each side of id card\n\n list_indices_top = np.where(group_items==0)\n list_indices_back = np.where(group_items==1)\n num_top = np.count_nonzero(group_items==0)\n num_back = np.count_nonzero(group_items==1)\n\n if (num_top > num_back):\n list_del_indices = list_indices_back\n elif (num_top < num_back):\n list_del_indices = list_indices_top\n else:\n value_top = np.take(scores, list_indices_top)\n value_back = np.take(scores, list_indices_top)\n if (np.sum(value_top) > np.sum(value_back)):\n list_del_indices = list_indices_back\n 
else:\n list_del_indices = list_indices_top\n\n labels = np.delete(labels, list_del_indices)\n scores = np.delete(scores, list_del_indices)\n boxes = np.delete(boxes, list_del_indices, 0)\n\n # Delete duplicate of labels for one side\n list_duplicate = [item for item, count in collections.Counter(labels).items() if count > 1]\n \n for dup in list_duplicate:\n list_indices = [i for (i, item) in enumerate(labels) if item == dup]\n max_conf_indice = list_indices[0]\n for indice in list_indices:\n if scores[indice] > scores[max_conf_indice]:\n max_conf_indice = indice\n \n list_indices.remove(max_conf_indice)\n \n labels = np.delete(labels, list_indices)\n scores = np.delete(scores, list_indices)\n boxes = np.delete(boxes, list_indices, 0)\n\n return labels, scores, boxes\n\n\[email protected]_grad()\ndef run_demo(cfg, ckpt, score_threshold, images_dir, output_dir, dataset_type, check_9_labels=False):\n if dataset_type == \"voc\":\n class_names = VOCDataset.class_names\n elif dataset_type == 'coco':\n class_names = COCODataset.class_names\n elif dataset_type == \"custom\":\n if (check_9_labels):\n class_names = MyDataset.class_names_9_labels\n else:\n class_names = MyDataset.class_names_5_labels\n else:\n raise NotImplementedError('Not implemented now.')\n device = torch.device(cfg.MODEL.DEVICE)\n\n model = build_detection_model(cfg)\n model = model.to(device)\n checkpointer = CheckPointer(model, save_dir=cfg.OUTPUT_DIR)\n checkpointer.load(ckpt, use_latest=ckpt is None)\n weight_file = ckpt if ckpt else checkpointer.get_checkpoint_file()\n print('Loaded weights from {}'.format(weight_file))\n\n types_id = [\"cccd\", \"cmnd\"]\n types_face = [\"back\", \"top\"]\n \n folder_dirs = []\n for id in types_id:\n for face in types_face:\n folder_dirs.append(os.path.join(id + \"/\", face))\n\n image_paths = []\n for folder in folder_dirs:\n\n image_paths.extend(glob.glob(os.path.join(images_dir + \"/\", folder + \"/\", '*.jpg')))\n\n result_output_dir = 
os.path.join(output_dir, \"result/\", folder)\n mkdir(result_output_dir)\n output_dir_crop = os.path.join(output_dir, 'crop/', folder)\n mkdir(output_dir_crop)\n\n cpu_device = torch.device(\"cpu\")\n transforms = build_transforms(cfg, is_train=False)\n\n model.eval()\n\n count_true = 0\n count_error_1 = 0\n count_error_more_2 = 0\n error_images = []\n images_missing_1_corner = []\n\n for i, image_path in enumerate(image_paths):\n\n start = time.time()\n\n image_name = os.path.basename(image_path)\n image = cv2.imdecode(np.fromfile(image_path, dtype=np.uint8), cv2.IMREAD_COLOR)\n # image_show = image.copy()\n # cv2.cvtColor(image_show, cv2.COLOR_BGR2RGB)\n \n # width = image.shape[1]\n # height = image.shape[0]\n # ratio_resize = 1\n # if (width * height > 6 * 10**6):\n # ratio_resize = 4\n # elif (width * height > 8 * 10**5):\n # ratio_resize = 1.5\n \n # image = cv2.resize(image, (int(width / ratio_resize), int(height / ratio_resize)))\n\n preprocessed_image = image_processing(image)\n\n height, width = preprocessed_image.shape[:2]\n images = transforms(preprocessed_image)[0].unsqueeze(0)\n load_time = time.time() - start\n\n start = time.time()\n result = model(images.to(device))[0]\n inference_time = time.time() - start\n\n result = result.resize((width, height)).to(cpu_device).numpy()\n boxes, labels, scores = result['boxes'], result['labels'], result['scores']\n\n indices = scores > score_threshold\n boxes = boxes[indices]\n labels = labels[indices]\n scores = scores[indices]\n meters = ' | '.join(\n [\n 'objects {:02d}'.format(len(boxes)),\n 'load {:03d}ms'.format(round(load_time * 1000)),\n 'inference {:03d}ms'.format(round(inference_time * 1000)),\n 'FPS {}'.format(round(1.0 / inference_time))\n ]\n )\n print('({:04d}/{:04d}) {}: {}'.format(i + 1, len(image_paths), image_name, meters))\n \n labels, scores, boxes = process_duplicate_labels(labels, scores, boxes, check_9_labels)\n\n # for i in range(len(boxes)):\n # for k in range(len(boxes[i])):\n # 
boxes[i][k] -= pixel_border\n # boxes[i][k] *= ratio_resize\n \n drawn_bounding_box_image = draw_boxes(image, boxes, labels, scores, class_names).astype(np.uint8)\n\n # Crop image\n pair = zip(labels, boxes)\n sort_pair = sorted(pair)\n boxes = [element for _, element in sort_pair]\n labels = [element for element, _ in sort_pair]\n labels_name = [class_names[i] for i in labels]\n \n if len(boxes) == 4:\n count_true += 1\n crop = align_image(image, boxes[0], boxes[1], boxes[2], boxes[3], True)\n elif len(boxes) == 3:\n # Find fourth missed corner\n thresh = 0\n images_missing_1_corner.append(os.path.join(os.path.basename(os.path.dirname(image_path)), image_name))\n count_error_1 += 1\n \n if \"top_left\" not in \",\".join(labels_name):\n midpoint = np.add(get_center_bbox(boxes[0]), get_center_bbox(boxes[2])) / 2\n y = int(2 * midpoint[1] - get_center_bbox(boxes[1])[1] + thresh)\n x = int(2 * midpoint[0] - get_center_bbox(boxes[1])[0] + thresh)\n TL = np.array([x, y, x, y])\n crop = align_image(image, TL, boxes[0], boxes[1], boxes[2], True)\n elif \"top_right\" not in \",\".join(labels_name):\n midpoint = np.add(get_center_bbox(boxes[0]), get_center_bbox(boxes[1])) / 2\n y = int(2 * midpoint[1] - get_center_bbox(boxes[2])[1] + thresh)\n x = int(2 * midpoint[0] - get_center_bbox(boxes[2])[0] + thresh)\n TR = np.array([x, y, x, y])\n crop = align_image(image, boxes[0], TR, boxes[1], boxes[2], True)\n elif \"bottom_right\" not in \",\".join(labels_name):\n midpoint = np.add(get_center_bbox(boxes[2]), get_center_bbox(boxes[1])) / 2\n y = int(2 * midpoint[1] - get_center_bbox(boxes[0])[1] + thresh)\n x = int(2 * midpoint[0] - get_center_bbox(boxes[0])[0] + thresh)\n BR = np.array([x, y, x, y])\n crop = align_image(image, boxes[0], boxes[1], BR, boxes[2], True)\n elif \"bottom_left\" not in \",\".join(labels_name):\n midpoint = np.add(get_center_bbox(boxes[0]), get_center_bbox(boxes[2])) / 2\n y = int(2 * midpoint[1] - get_center_bbox(boxes[1])[1] + thresh)\n x = int(2 * 
midpoint[0] - get_center_bbox(boxes[1])[0] + thresh)\n BL = np.array([x, y, x, y])\n crop = align_image(image, boxes[0], boxes[1], boxes[2], BL, True)\n else:\n count_error_more_2 += 1\n error_images.append(os.path.join(os.path.basename(os.path.dirname(image_path)), image_name))\n print(\"Please take a photo again, number of detected corners is:\", len(boxes))\n continue\n\n face_type = os.path.basename(os.path.dirname(image_path))\n id_type = os.path.basename(os.path.dirname(os.path.dirname(image_path)))\n cv2.imwrite(os.path.join(output_dir, \"crop\", face_type, id_type, image_name), crop)\n cv2.imwrite(os.path.join(output_dir, \"result\", face_type, id_type, image_name), drawn_bounding_box_image)\n\n print(\"Number of true images: {}\".format(count_true))\n print(\"Number of 3 corner images: {}\".format(count_error_1))\n print(\"Number of 2 corner images: {}\".format(count_error_more_2))\n print(\"Image have 3 corners: {}\".format(images_missing_1_corner))\n print(\"Error Images: {}\".format(error_images))\n\n\ndef main():\n\n parser = argparse.ArgumentParser(description=\"SSD Demo.\")\n\n parser.add_argument(\n \"--config-file\",\n default=\"\",\n metavar=\"FILE\",\n help=\"path to config file\",\n type=str,\n )\n parser.add_argument(\"--ckpt\", type=str, default=None, help=\"Trained weights.\")\n parser.add_argument(\"--score_threshold\", type=float, default=0.7)\n parser.add_argument(\"--images_dir\", default='demo', type=str, help='Specify a image dir to do prediction.')\n parser.add_argument(\"--output_dir\", default='demo/result/', type=str, help='Specify a image dir to save predicted images.')\n parser.add_argument(\"--dataset_type\", default=\"custom\", type=str, help='Specify dataset type. 
Currently support voc and coco.')\n parser.add_argument(\"--check_9_labels\", default=False, action=\"store_true\", help='Allow the dataset of 9 labels (4 corners of 2 face including top and back of id card)')\n\n parser.add_argument(\n \"opts\",\n help=\"Modify config options using the command-line\",\n default=None,\n nargs=argparse.REMAINDER,\n )\n args = parser.parse_args()\n \n cfg.merge_from_file(args.config_file)\n cfg.merge_from_list(args.opts)\n cfg.freeze()\n\n print(\"Loaded configuration file {}\".format(args.config_file))\n with open(args.config_file, \"r\") as cf:\n config_str = \"\\n\" + cf.read()\n print(config_str)\n print(\"Running with config:\\n{}\".format(cfg))\n\n run_demo(cfg=cfg,\n ckpt=args.ckpt,\n score_threshold=args.score_threshold,\n images_dir=args.images_dir,\n output_dir=args.output_dir,\n dataset_type=args.dataset_type,\n check_9_labels=args.check_9_labels)\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.583608865737915,
"alphanum_fraction": 0.5892772674560547,
"avg_line_length": 38.5514030456543,
"blob_id": "707cc219fa5c2e57cbf5eb87355f739c41460812",
"content_id": "71ba76e8deeab575786c24f7f3e1f0856830ac38",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4234,
"license_type": "permissive",
"max_line_length": 124,
"num_lines": 107,
"path": "/ssd/data/datasets/my_dataset.py",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "import os\nimport torch.utils.data\nimport numpy as np\nimport xml.etree.ElementTree as ET\nfrom PIL import Image\nfrom ssd.utils.misc import reorient_image\n\nfrom ssd.structures.container import Container\n\n\n\nclass MyDataset(torch.utils.data.Dataset):\n\n class_names_5_labels = ('__background__', 'top_left', 'top_right', 'bottom_right', 'bottom_left')\n class_names_9_labels = ('__background__', 'top_left_top', 'top_right_top', 'bottom_right_top', 'bottom_left_top', \n 'top_left_back', 'top_right_back', 'bottom_right_back', 'bottom_left_back')\n\n def __init__(self, data_dir, split, transform=None, target_transform=None, keep_difficult=True, check_9_labels = False):\n \"\"\"Dataset for VOC data.\n Args:\n data_dir: the root of the dataset, the directory contains the following sub-directories:\n Annotations, ImageSets, JPEGImages, SegmentationClass, SegmentationObject.\n \"\"\"\n if (check_9_labels):\n self.class_names = self.class_names_9_labels\n else:\n self.class_names = self.class_names_5_labels\n\n self.data_dir = data_dir\n self.split = split\n self.transform = transform\n self.target_transform = target_transform\n image_sets_file = os.path.join(self.data_dir, \"ImageSets\", \"Main\", \"%s.txt\" % self.split)\n self.ids = MyDataset._read_image_ids(image_sets_file)\n self.keep_difficult = keep_difficult\n\n self.class_dict = {class_name: i for i, class_name in enumerate(self.class_names)}\n\n def __getitem__(self, index):\n image_id = self.ids[index]\n boxes, labels, is_difficult = self._get_annotation(image_id)\n if not self.keep_difficult:\n boxes = boxes[is_difficult == 0]\n labels = labels[is_difficult == 0]\n image = self._read_image(image_id)\n if self.transform:\n image, boxes, labels = self.transform(image, boxes, labels)\n if self.target_transform:\n boxes, labels = self.target_transform(boxes, labels)\n targets = Container(\n boxes=boxes,\n labels=labels,\n )\n return image, targets, index\n\n def get_annotation(self, index):\n image_id 
= self.ids[index]\n return image_id, self._get_annotation(image_id)\n\n def __len__(self):\n return len(self.ids)\n\n @staticmethod\n def _read_image_ids(image_sets_file):\n ids = []\n with open(image_sets_file) as f:\n for line in f:\n ids.append(line.rstrip())\n return ids\n\n def _get_annotation(self, image_id):\n annotation_file = os.path.join(self.data_dir, \"Annotations\", \"%s.xml\" % image_id)\n objects = ET.parse(annotation_file).findall(\"object\")\n boxes = []\n labels = []\n is_difficult = []\n for obj in objects:\n class_name = obj.find('name').text.lower().strip()\n bbox = obj.find('bndbox')\n \n x1 = float(bbox.find('xmin').text)\n y1 = float(bbox.find('ymin').text)\n x2 = float(bbox.find('xmax').text)\n y2 = float(bbox.find('ymax').text)\n boxes.append([x1, y1, x2, y2])\n labels.append(self.class_dict[class_name])\n is_difficult_str = obj.find('difficult').text\n is_difficult.append(int(is_difficult_str) if is_difficult_str else 0)\n\n return (np.array(boxes, dtype=np.float32),\n np.array(labels, dtype=np.int64),\n np.array(is_difficult, dtype=np.uint8))\n\n def get_img_info(self, index):\n img_id = self.ids[index]\n annotation_file = os.path.join(self.data_dir, \"Annotations\", \"%s.xml\" % img_id)\n anno = ET.parse(annotation_file).getroot()\n size = anno.find(\"size\")\n im_info = tuple(map(int, (size.find(\"height\").text, size.find(\"width\").text)))\n return {\"height\": im_info[0], \"width\": im_info[1]}\n\n def _read_image(self, image_id):\n image_file = os.path.join(self.data_dir, \"JPEGImages\", \"%s.jpg\" % image_id)\n image = Image.open(image_file)\n image = reorient_image(image).convert(\"RGB\")\n image = np.array(image)\n return image\n\n\n"
},
{
"alpha_fraction": 0.6226057410240173,
"alphanum_fraction": 0.6354451775550842,
"avg_line_length": 42.19091033935547,
"blob_id": "9666f30d4297583b168577f48581fcea29ed6042",
"content_id": "08e9ccb6e9c1ac5de8d378d0970ae6af44b95e63",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4751,
"license_type": "permissive",
"max_line_length": 166,
"num_lines": 110,
"path": "/ssd/engine/inference.py",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "import logging\nimport os\n\nimport torch\nimport torch.utils.data\nfrom tqdm import tqdm\n\nfrom ssd.data.build import make_data_loader\nfrom ssd.data.datasets.evaluation import evaluate\n\nfrom ssd.utils import dist_util, mkdir\nfrom ssd.utils.dist_util import synchronize, is_main_process\nimport cv2\n\ndef _accumulate_predictions_from_multiple_gpus(predictions_per_gpu):\n all_predictions = dist_util.all_gather(predictions_per_gpu)\n if not dist_util.is_main_process():\n return\n # merge the list of dicts\n predictions = {}\n for p in all_predictions:\n predictions.update(p)\n # convert a dict where the key is the index in a list\n image_ids = list(sorted(predictions.keys()))\n if len(image_ids) != image_ids[-1] + 1:\n logger = logging.getLogger(\"SSD.inference\")\n logger.warning(\n \"Number of images that were gathered from multiple processes is not \"\n \"a contiguous set. Some images might be missing from the evaluation\"\n )\n\n # convert to a list\n predictions = [predictions[i] for i in image_ids]\n return predictions\n\n\ndef compute_on_dataset(model, data_loader, device):\n results_dict = {}\n for batch in tqdm(data_loader):\n images, targets, image_ids = batch\n cpu_device = torch.device(\"cpu\")\n with torch.no_grad():\n outputs = model(images.to(device))\n\n outputs = [o.to(cpu_device) for o in outputs]\n results_dict.update(\n {int(img_id): result for img_id, result in zip(image_ids, outputs)}\n )\n return results_dict\n\n\ndef inference(model, data_loader, dataset_name, device, output_folder=None, use_cached=False, allow_write_img = False, image_size = 512, **kwargs):\n dataset = data_loader.dataset\n logger = logging.getLogger(\"SSD.inference\")\n logger.info(\"Evaluating {} dataset({} images):\".format(dataset_name, len(dataset)))\n predictions_path = os.path.join(output_folder, 'predictions.pth')\n if use_cached and os.path.exists(predictions_path):\n predictions = torch.load(predictions_path, map_location='cpu')\n else:\n predictions = 
compute_on_dataset(model, data_loader, device)\n synchronize()\n predictions = _accumulate_predictions_from_multiple_gpus(predictions)\n if not is_main_process():\n return\n if output_folder:\n torch.save(predictions, predictions_path)\n\n if (allow_write_img):\n if (not os.path.isdir(\"eval_results\")):\n os.mkdir(\"eval_results\")\n\n LABEL = dataset.class_names\n for i in range(len(dataset)):\n image_id, annotation = dataset.get_annotation(i)\n img = dataset._read_image(image_id)\n\n img_info = dataset.get_img_info(i)\n prediction = predictions[i]\n boxes, labels, scores = prediction['boxes'], prediction['labels'], prediction['scores']\n\n for i in range(len(boxes)):\n b1 = int(max(boxes[i][0] * img_info[\"width\"] / image_size, 0))\n b2 = int(max(boxes[i][1] * img_info[\"height\"] / image_size, 0))\n b3 = int(min(boxes[i][2] * img_info[\"width\"] / image_size, img_info[\"width\"]))\n b4 = int(min(boxes[i][3] * img_info[\"height\"] / image_size, img_info[\"height\"]))\n img = cv2.rectangle(img, (b1, b2), (b3, b4), (255, 0, 0), 2)\n img = cv2.putText(img, \"{}\".format(LABEL[labels[i]]), (b1, b2 - 30), cv2.FONT_HERSHEY_SIMPLEX, \n 0.8, (0, 0, 255), 2, cv2.LINE_AA)\n img = cv2.putText(img, \"{}\".format(round(float(scores[i]), 2)), (b1, b2 - 5), cv2.FONT_HERSHEY_SIMPLEX, \n 0.8, (0, 0, 255), 2, cv2.LINE_AA)\n\n cv2.imwrite(os.path.join(\"eval_results\", \"{}.jpg\".format(image_id)), img)\n return evaluate(dataset=dataset, predictions=predictions, output_dir=output_folder, **kwargs)\n\n\[email protected]_grad()\ndef do_evaluation(cfg, model, distributed, check_write_img = False, check_9_labels = False, **kwargs):\n if isinstance(model, torch.nn.parallel.DistributedDataParallel):\n model = model.module\n model.eval()\n device = torch.device(cfg.MODEL.DEVICE)\n data_loaders_val = make_data_loader(cfg, is_train=False, distributed=distributed, check_9_labels=check_9_labels)\n eval_results = []\n for dataset_name, data_loader in zip(cfg.DATASETS.TEST, 
data_loaders_val):\n output_folder = os.path.join(cfg.OUTPUT_DIR, \"inference\", dataset_name)\n if not os.path.exists(output_folder):\n mkdir(output_folder)\n eval_result = inference(model, data_loader, dataset_name, device, output_folder, allow_write_img=check_write_img, image_size = cfg.INPUT.IMAGE_SIZE, **kwargs)\n eval_results.append(eval_result)\n return eval_results\n"
},
{
"alpha_fraction": 0.6727688908576965,
"alphanum_fraction": 0.6847826242446899,
"avg_line_length": 29.66666603088379,
"blob_id": "4096d62e6cbf30e0536998131192852b6b6af8c4",
"content_id": "6bc7016fd6d44ac2c0b3b4f8f957e2a5b4ddfe62",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1748,
"license_type": "permissive",
"max_line_length": 175,
"num_lines": 57,
"path": "/convert_pascalvoc_dataset/build.py",
"repo_name": "nhlinh99/SSD",
"src_encoding": "UTF-8",
"text": "#! -*- coding: utf-8 -*-\n\nimport os\nimport sys\nfrom argparse import ArgumentParser\nfrom easydict import EasyDict as edict\n\nfrom pascal_voc.pascal_voc import PASCALVOC07\n\nconfig = edict()\n\nconfig.author = \"Sunshine Tech\"\nconfig.root = \"annotation\"\nconfig.folder = \"VOC2007\"\nconfig.annotation = \"PASCAL VOC2007\"\nconfig.segmented = \"0\"\nconfig.difficult = \"0\"\nconfig.truncated = \"0\"\nconfig.pose = \"Unspecified\"\nconfig.database = \"CMND_BACK\"\nconfig.depth = \"3\"\n\n\ndef parse_inputs():\n\n \"\"\" Parser function to take care of the inputs \"\"\"\n\n parser = ArgumentParser(description='Argument: python data_preprocess.py <annotation_path> <output_direction>')\n \n parser.add_argument('annotation_dir', default=\"Annotations\", type=str,\n help='Enter the path of annotation files.')\n parser.add_argument('output_dir', type=str,\n help='Enter the path of the output.')\n args = parser.parse_args()\n\n return (args.output_dir, args.annotation_dir)\n\n\nif __name__ == \"__main__\":\n\n # !python convert_pascalvoc_dataset/build.py \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/Annotations\" \"/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/data\"\n\n # OR test on local\n # python convert_pascalvoc_dataset/build.py \"Annotations/\" \"data/\"\n\n output_dir, annotation_dir = parse_inputs()\n\n if (not os.path.isdir(output_dir)):\n os.mkdir(output_dir)\n\n print(\"Building PASCAL VOC 2007...\")\n trainval_anno = os.path.join(annotation_dir, 'Train_annotation.txt')\n val_anno = os.path.join(annotation_dir, 'Val_annotation.txt')\n test_anno = os.path.join(annotation_dir, 'Test_annotation.txt')\n\n p = PASCALVOC07(trainval_anno, val_anno, test_anno, output_dir, config)\n p.build(True)\n"
}
] | 9 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.